code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def do_some_work(
self,
work_dict):
"""do_some_work
:param work_dict: dictionary for key/values
"""
label = "do_some_work"
log.info(("task - {} - start "
"work_dict={}")
.format(label,
work_dict))
ret_data = {
"job_results": ("some response key={}").format(
str(uuid.uuid4()))
}
log.info(("task - {} - result={} done")
.format(
ret_data,
label))
return ret_data | def function[do_some_work, parameter[self, work_dict]]:
constant[do_some_work
:param work_dict: dictionary for key/values
]
variable[label] assign[=] constant[do_some_work]
call[name[log].info, parameter[call[constant[task - {} - start work_dict={}].format, parameter[name[label], name[work_dict]]]]]
variable[ret_data] assign[=] dictionary[[<ast.Constant object at 0x7da18eb545e0>], [<ast.Call object at 0x7da18eb57100>]]
call[name[log].info, parameter[call[constant[task - {} - result={} done].format, parameter[name[ret_data], name[label]]]]]
return[name[ret_data]] | keyword[def] identifier[do_some_work] (
identifier[self] ,
identifier[work_dict] ):
literal[string]
identifier[label] = literal[string]
identifier[log] . identifier[info] (( literal[string]
literal[string] )
. identifier[format] ( identifier[label] ,
identifier[work_dict] ))
identifier[ret_data] ={
literal[string] :( literal[string] ). identifier[format] (
identifier[str] ( identifier[uuid] . identifier[uuid4] ()))
}
identifier[log] . identifier[info] (( literal[string] )
. identifier[format] (
identifier[ret_data] ,
identifier[label] ))
keyword[return] identifier[ret_data] | def do_some_work(self, work_dict):
"""do_some_work
:param work_dict: dictionary for key/values
"""
label = 'do_some_work'
log.info('task - {} - start work_dict={}'.format(label, work_dict))
ret_data = {'job_results': 'some response key={}'.format(str(uuid.uuid4()))}
log.info('task - {} - result={} done'.format(ret_data, label))
return ret_data |
def get_object(table, id=None, condition=None, cache=False, fields=None, use_local=False,
engine_name=None, session=None):
"""
Get obj in Local.object_caches first and also use get(cache=True) function if
not found in object_caches
"""
from uliweb import functions, settings
model = get_model(table, engine_name)
#if id is an object of Model, so get the real id value
if isinstance(id, Model):
return id
if cache:
if use_local:
s = get_session(session)
key = get_object_id(s.engine_name, model.tablename, id)
value = s.get_local_cache(key)
if value:
return value
obj = model.get(id, condition=condition, fields=fields, cache=True)
if use_local:
value = s.get_local_cache(key, obj)
else:
obj = model.get(id, condition=condition, fields=fields)
return obj | def function[get_object, parameter[table, id, condition, cache, fields, use_local, engine_name, session]]:
constant[
Get obj in Local.object_caches first and also use get(cache=True) function if
not found in object_caches
]
from relative_module[uliweb] import module[functions], module[settings]
variable[model] assign[=] call[name[get_model], parameter[name[table], name[engine_name]]]
if call[name[isinstance], parameter[name[id], name[Model]]] begin[:]
return[name[id]]
if name[cache] begin[:]
if name[use_local] begin[:]
variable[s] assign[=] call[name[get_session], parameter[name[session]]]
variable[key] assign[=] call[name[get_object_id], parameter[name[s].engine_name, name[model].tablename, name[id]]]
variable[value] assign[=] call[name[s].get_local_cache, parameter[name[key]]]
if name[value] begin[:]
return[name[value]]
variable[obj] assign[=] call[name[model].get, parameter[name[id]]]
if name[use_local] begin[:]
variable[value] assign[=] call[name[s].get_local_cache, parameter[name[key], name[obj]]]
return[name[obj]] | keyword[def] identifier[get_object] ( identifier[table] , identifier[id] = keyword[None] , identifier[condition] = keyword[None] , identifier[cache] = keyword[False] , identifier[fields] = keyword[None] , identifier[use_local] = keyword[False] ,
identifier[engine_name] = keyword[None] , identifier[session] = keyword[None] ):
literal[string]
keyword[from] identifier[uliweb] keyword[import] identifier[functions] , identifier[settings]
identifier[model] = identifier[get_model] ( identifier[table] , identifier[engine_name] )
keyword[if] identifier[isinstance] ( identifier[id] , identifier[Model] ):
keyword[return] identifier[id]
keyword[if] identifier[cache] :
keyword[if] identifier[use_local] :
identifier[s] = identifier[get_session] ( identifier[session] )
identifier[key] = identifier[get_object_id] ( identifier[s] . identifier[engine_name] , identifier[model] . identifier[tablename] , identifier[id] )
identifier[value] = identifier[s] . identifier[get_local_cache] ( identifier[key] )
keyword[if] identifier[value] :
keyword[return] identifier[value]
identifier[obj] = identifier[model] . identifier[get] ( identifier[id] , identifier[condition] = identifier[condition] , identifier[fields] = identifier[fields] , identifier[cache] = keyword[True] )
keyword[if] identifier[use_local] :
identifier[value] = identifier[s] . identifier[get_local_cache] ( identifier[key] , identifier[obj] )
keyword[else] :
identifier[obj] = identifier[model] . identifier[get] ( identifier[id] , identifier[condition] = identifier[condition] , identifier[fields] = identifier[fields] )
keyword[return] identifier[obj] | def get_object(table, id=None, condition=None, cache=False, fields=None, use_local=False, engine_name=None, session=None):
"""
Get obj in Local.object_caches first and also use get(cache=True) function if
not found in object_caches
"""
from uliweb import functions, settings
model = get_model(table, engine_name)
#if id is an object of Model, so get the real id value
if isinstance(id, Model):
return id # depends on [control=['if'], data=[]]
if cache:
if use_local:
s = get_session(session)
key = get_object_id(s.engine_name, model.tablename, id)
value = s.get_local_cache(key)
if value:
return value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
obj = model.get(id, condition=condition, fields=fields, cache=True)
if use_local:
value = s.get_local_cache(key, obj) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
obj = model.get(id, condition=condition, fields=fields)
return obj |
def roles_accepted(*accepted_rolenames):
"""
This decorator ensures that any uses accessing the decorated route have one
of the needed roles to access it. If an @auth_required decorator is not
supplied already, this decorator will implicitly check @auth_required first
"""
def decorator(method):
@functools.wraps(method)
def wrapper(*args, **kwargs):
role_set = set([str(n) for n in accepted_rolenames])
_verify_and_add_jwt()
try:
MissingRoleError.require_condition(
not current_rolenames().isdisjoint(role_set),
"This endpoint requires one of the following roles: {}",
[', '.join(role_set)],
)
return method(*args, **kwargs)
finally:
remove_jwt_data_from_app_context()
return wrapper
return decorator | def function[roles_accepted, parameter[]]:
constant[
This decorator ensures that any uses accessing the decorated route have one
of the needed roles to access it. If an @auth_required decorator is not
supplied already, this decorator will implicitly check @auth_required first
]
def function[decorator, parameter[method]]:
def function[wrapper, parameter[]]:
variable[role_set] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da20e74afe0>]]
call[name[_verify_and_add_jwt], parameter[]]
<ast.Try object at 0x7da204347370>
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[roles_accepted] (* identifier[accepted_rolenames] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[method] ):
@ identifier[functools] . identifier[wraps] ( identifier[method] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[role_set] = identifier[set] ([ identifier[str] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[accepted_rolenames] ])
identifier[_verify_and_add_jwt] ()
keyword[try] :
identifier[MissingRoleError] . identifier[require_condition] (
keyword[not] identifier[current_rolenames] (). identifier[isdisjoint] ( identifier[role_set] ),
literal[string] ,
[ literal[string] . identifier[join] ( identifier[role_set] )],
)
keyword[return] identifier[method] (* identifier[args] ,** identifier[kwargs] )
keyword[finally] :
identifier[remove_jwt_data_from_app_context] ()
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def roles_accepted(*accepted_rolenames):
"""
This decorator ensures that any uses accessing the decorated route have one
of the needed roles to access it. If an @auth_required decorator is not
supplied already, this decorator will implicitly check @auth_required first
"""
def decorator(method):
@functools.wraps(method)
def wrapper(*args, **kwargs):
role_set = set([str(n) for n in accepted_rolenames])
_verify_and_add_jwt()
try:
MissingRoleError.require_condition(not current_rolenames().isdisjoint(role_set), 'This endpoint requires one of the following roles: {}', [', '.join(role_set)])
return method(*args, **kwargs) # depends on [control=['try'], data=[]]
finally:
remove_jwt_data_from_app_context()
return wrapper
return decorator |
def copy(self,
fitness_function=None,
decode_function=None,
fitness_args=None,
decode_args=None,
fitness_kwargs=None,
decode_kwargs=None):
"""Return a copy of this problem.
Optionally replace this problems arguments with those passed in.
"""
if fitness_function is None:
fitness_function = self._fitness_function
if decode_function is None:
decode_function = self._decode_function
if fitness_args is None:
fitness_args = self._fitness_args
if decode_args is None:
decode_args = self._decode_args
if fitness_kwargs is None:
fitness_kwargs = self._fitness_kwargs
if decode_kwargs is None:
decode_kwargs = self._decode_kwargs
return Problem(
fitness_function,
decode_function=decode_function,
fitness_args=fitness_args,
decode_args=decode_args,
fitness_kwargs=fitness_kwargs,
decode_kwargs=decode_kwargs) | def function[copy, parameter[self, fitness_function, decode_function, fitness_args, decode_args, fitness_kwargs, decode_kwargs]]:
constant[Return a copy of this problem.
Optionally replace this problems arguments with those passed in.
]
if compare[name[fitness_function] is constant[None]] begin[:]
variable[fitness_function] assign[=] name[self]._fitness_function
if compare[name[decode_function] is constant[None]] begin[:]
variable[decode_function] assign[=] name[self]._decode_function
if compare[name[fitness_args] is constant[None]] begin[:]
variable[fitness_args] assign[=] name[self]._fitness_args
if compare[name[decode_args] is constant[None]] begin[:]
variable[decode_args] assign[=] name[self]._decode_args
if compare[name[fitness_kwargs] is constant[None]] begin[:]
variable[fitness_kwargs] assign[=] name[self]._fitness_kwargs
if compare[name[decode_kwargs] is constant[None]] begin[:]
variable[decode_kwargs] assign[=] name[self]._decode_kwargs
return[call[name[Problem], parameter[name[fitness_function]]]] | keyword[def] identifier[copy] ( identifier[self] ,
identifier[fitness_function] = keyword[None] ,
identifier[decode_function] = keyword[None] ,
identifier[fitness_args] = keyword[None] ,
identifier[decode_args] = keyword[None] ,
identifier[fitness_kwargs] = keyword[None] ,
identifier[decode_kwargs] = keyword[None] ):
literal[string]
keyword[if] identifier[fitness_function] keyword[is] keyword[None] :
identifier[fitness_function] = identifier[self] . identifier[_fitness_function]
keyword[if] identifier[decode_function] keyword[is] keyword[None] :
identifier[decode_function] = identifier[self] . identifier[_decode_function]
keyword[if] identifier[fitness_args] keyword[is] keyword[None] :
identifier[fitness_args] = identifier[self] . identifier[_fitness_args]
keyword[if] identifier[decode_args] keyword[is] keyword[None] :
identifier[decode_args] = identifier[self] . identifier[_decode_args]
keyword[if] identifier[fitness_kwargs] keyword[is] keyword[None] :
identifier[fitness_kwargs] = identifier[self] . identifier[_fitness_kwargs]
keyword[if] identifier[decode_kwargs] keyword[is] keyword[None] :
identifier[decode_kwargs] = identifier[self] . identifier[_decode_kwargs]
keyword[return] identifier[Problem] (
identifier[fitness_function] ,
identifier[decode_function] = identifier[decode_function] ,
identifier[fitness_args] = identifier[fitness_args] ,
identifier[decode_args] = identifier[decode_args] ,
identifier[fitness_kwargs] = identifier[fitness_kwargs] ,
identifier[decode_kwargs] = identifier[decode_kwargs] ) | def copy(self, fitness_function=None, decode_function=None, fitness_args=None, decode_args=None, fitness_kwargs=None, decode_kwargs=None):
"""Return a copy of this problem.
Optionally replace this problems arguments with those passed in.
"""
if fitness_function is None:
fitness_function = self._fitness_function # depends on [control=['if'], data=['fitness_function']]
if decode_function is None:
decode_function = self._decode_function # depends on [control=['if'], data=['decode_function']]
if fitness_args is None:
fitness_args = self._fitness_args # depends on [control=['if'], data=['fitness_args']]
if decode_args is None:
decode_args = self._decode_args # depends on [control=['if'], data=['decode_args']]
if fitness_kwargs is None:
fitness_kwargs = self._fitness_kwargs # depends on [control=['if'], data=['fitness_kwargs']]
if decode_kwargs is None:
decode_kwargs = self._decode_kwargs # depends on [control=['if'], data=['decode_kwargs']]
return Problem(fitness_function, decode_function=decode_function, fitness_args=fitness_args, decode_args=decode_args, fitness_kwargs=fitness_kwargs, decode_kwargs=decode_kwargs) |
def from_envvar(self, variable_name):
"""Load a configuration from an environment variable pointing to
a configuration file.
:param variable_name: name of the environment variable
:return: bool. ``True`` if able to load config, ``False`` otherwise.
"""
config_file = os.environ.get(variable_name)
if not config_file:
raise RuntimeError(
"The environment variable %r is not set and "
"thus configuration could not be loaded." % variable_name
)
return self.from_pyfile(config_file) | def function[from_envvar, parameter[self, variable_name]]:
constant[Load a configuration from an environment variable pointing to
a configuration file.
:param variable_name: name of the environment variable
:return: bool. ``True`` if able to load config, ``False`` otherwise.
]
variable[config_file] assign[=] call[name[os].environ.get, parameter[name[variable_name]]]
if <ast.UnaryOp object at 0x7da1b1fe4070> begin[:]
<ast.Raise object at 0x7da1b1fe4970>
return[call[name[self].from_pyfile, parameter[name[config_file]]]] | keyword[def] identifier[from_envvar] ( identifier[self] , identifier[variable_name] ):
literal[string]
identifier[config_file] = identifier[os] . identifier[environ] . identifier[get] ( identifier[variable_name] )
keyword[if] keyword[not] identifier[config_file] :
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string] % identifier[variable_name]
)
keyword[return] identifier[self] . identifier[from_pyfile] ( identifier[config_file] ) | def from_envvar(self, variable_name):
"""Load a configuration from an environment variable pointing to
a configuration file.
:param variable_name: name of the environment variable
:return: bool. ``True`` if able to load config, ``False`` otherwise.
"""
config_file = os.environ.get(variable_name)
if not config_file:
raise RuntimeError('The environment variable %r is not set and thus configuration could not be loaded.' % variable_name) # depends on [control=['if'], data=[]]
return self.from_pyfile(config_file) |
def add_value(
self, secret_resource_name, secret_value_resource_name, name, value=None, custom_headers=None, raw=False, **operation_config):
"""Adds the specified value as a new version of the specified secret
resource.
Creates a new value of the specified secret resource. The name of the
value is typically the version identifier. Once created the value
cannot be changed.
:param secret_resource_name: The name of the secret resource.
:type secret_resource_name: str
:param secret_value_resource_name: The name of the secret resource
value which is typically the version identifier for the value.
:type secret_value_resource_name: str
:param name: Version identifier of the secret value.
:type name: str
:param value: The actual value of the secret.
:type value: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SecretValueResourceDescription or ClientRawResponse if
raw=true
:rtype: ~azure.servicefabric.models.SecretValueResourceDescription or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`FabricErrorException<azure.servicefabric.models.FabricErrorException>`
"""
secret_value_resource_description = models.SecretValueResourceDescription(name=name, value=value)
# Construct URL
url = self.add_value.metadata['url']
path_format_arguments = {
'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True),
'secretValueResourceName': self._serialize.url("secret_value_resource_name", secret_value_resource_name, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(secret_value_resource_description, 'SecretValueResourceDescription')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
raise models.FabricErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SecretValueResourceDescription', response)
if response.status_code == 201:
deserialized = self._deserialize('SecretValueResourceDescription', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized | def function[add_value, parameter[self, secret_resource_name, secret_value_resource_name, name, value, custom_headers, raw]]:
constant[Adds the specified value as a new version of the specified secret
resource.
Creates a new value of the specified secret resource. The name of the
value is typically the version identifier. Once created the value
cannot be changed.
:param secret_resource_name: The name of the secret resource.
:type secret_resource_name: str
:param secret_value_resource_name: The name of the secret resource
value which is typically the version identifier for the value.
:type secret_value_resource_name: str
:param name: Version identifier of the secret value.
:type name: str
:param value: The actual value of the secret.
:type value: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SecretValueResourceDescription or ClientRawResponse if
raw=true
:rtype: ~azure.servicefabric.models.SecretValueResourceDescription or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`FabricErrorException<azure.servicefabric.models.FabricErrorException>`
]
variable[secret_value_resource_description] assign[=] call[name[models].SecretValueResourceDescription, parameter[]]
variable[url] assign[=] call[name[self].add_value.metadata][constant[url]]
variable[path_format_arguments] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57040>, <ast.Constant object at 0x7da18eb55240>], [<ast.Call object at 0x7da18eb564a0>, <ast.Call object at 0x7da18eb56860>]]
variable[url] assign[=] call[name[self]._client.format_url, parameter[name[url]]]
variable[query_parameters] assign[=] dictionary[[], []]
call[name[query_parameters]][constant[api-version]] assign[=] call[name[self]._serialize.query, parameter[constant[self.api_version], name[self].api_version, constant[str]]]
variable[header_parameters] assign[=] dictionary[[], []]
call[name[header_parameters]][constant[Accept]] assign[=] constant[application/json]
call[name[header_parameters]][constant[Content-Type]] assign[=] constant[application/json; charset=utf-8]
if name[custom_headers] begin[:]
call[name[header_parameters].update, parameter[name[custom_headers]]]
variable[body_content] assign[=] call[name[self]._serialize.body, parameter[name[secret_value_resource_description], constant[SecretValueResourceDescription]]]
variable[request] assign[=] call[name[self]._client.put, parameter[name[url], name[query_parameters], name[header_parameters], name[body_content]]]
variable[response] assign[=] call[name[self]._client.send, parameter[name[request]]]
if compare[name[response].status_code <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da20e9b3d30>, <ast.Constant object at 0x7da20e9b0e80>, <ast.Constant object at 0x7da20e9b12d0>]]] begin[:]
<ast.Raise object at 0x7da20e9b35b0>
variable[deserialized] assign[=] constant[None]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[deserialized] assign[=] call[name[self]._deserialize, parameter[constant[SecretValueResourceDescription], name[response]]]
if compare[name[response].status_code equal[==] constant[201]] begin[:]
variable[deserialized] assign[=] call[name[self]._deserialize, parameter[constant[SecretValueResourceDescription], name[response]]]
if name[raw] begin[:]
variable[client_raw_response] assign[=] call[name[ClientRawResponse], parameter[name[deserialized], name[response]]]
return[name[client_raw_response]]
return[name[deserialized]] | keyword[def] identifier[add_value] (
identifier[self] , identifier[secret_resource_name] , identifier[secret_value_resource_name] , identifier[name] , identifier[value] = keyword[None] , identifier[custom_headers] = keyword[None] , identifier[raw] = keyword[False] ,** identifier[operation_config] ):
literal[string]
identifier[secret_value_resource_description] = identifier[models] . identifier[SecretValueResourceDescription] ( identifier[name] = identifier[name] , identifier[value] = identifier[value] )
identifier[url] = identifier[self] . identifier[add_value] . identifier[metadata] [ literal[string] ]
identifier[path_format_arguments] ={
literal[string] : identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[secret_resource_name] , literal[string] , identifier[skip_quote] = keyword[True] ),
literal[string] : identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[secret_value_resource_name] , literal[string] , identifier[skip_quote] = keyword[True] )
}
identifier[url] = identifier[self] . identifier[_client] . identifier[format_url] ( identifier[url] ,** identifier[path_format_arguments] )
identifier[query_parameters] ={}
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[self] . identifier[api_version] , literal[string] )
identifier[header_parameters] ={}
identifier[header_parameters] [ literal[string] ]= literal[string]
identifier[header_parameters] [ literal[string] ]= literal[string]
keyword[if] identifier[custom_headers] :
identifier[header_parameters] . identifier[update] ( identifier[custom_headers] )
identifier[body_content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[secret_value_resource_description] , literal[string] )
identifier[request] = identifier[self] . identifier[_client] . identifier[put] ( identifier[url] , identifier[query_parameters] , identifier[header_parameters] , identifier[body_content] )
identifier[response] = identifier[self] . identifier[_client] . identifier[send] ( identifier[request] , identifier[stream] = keyword[False] ,** identifier[operation_config] )
keyword[if] identifier[response] . identifier[status_code] keyword[not] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[raise] identifier[models] . identifier[FabricErrorException] ( identifier[self] . identifier[_deserialize] , identifier[response] )
identifier[deserialized] = keyword[None]
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[deserialized] = identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[deserialized] = identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
keyword[if] identifier[raw] :
identifier[client_raw_response] = identifier[ClientRawResponse] ( identifier[deserialized] , identifier[response] )
keyword[return] identifier[client_raw_response]
keyword[return] identifier[deserialized] | def add_value(self, secret_resource_name, secret_value_resource_name, name, value=None, custom_headers=None, raw=False, **operation_config):
"""Adds the specified value as a new version of the specified secret
resource.
Creates a new value of the specified secret resource. The name of the
value is typically the version identifier. Once created the value
cannot be changed.
:param secret_resource_name: The name of the secret resource.
:type secret_resource_name: str
:param secret_value_resource_name: The name of the secret resource
value which is typically the version identifier for the value.
:type secret_value_resource_name: str
:param name: Version identifier of the secret value.
:type name: str
:param value: The actual value of the secret.
:type value: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SecretValueResourceDescription or ClientRawResponse if
raw=true
:rtype: ~azure.servicefabric.models.SecretValueResourceDescription or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`FabricErrorException<azure.servicefabric.models.FabricErrorException>`
"""
secret_value_resource_description = models.SecretValueResourceDescription(name=name, value=value)
# Construct URL
url = self.add_value.metadata['url']
path_format_arguments = {'secretResourceName': self._serialize.url('secret_resource_name', secret_resource_name, 'str', skip_quote=True), 'secretValueResourceName': self._serialize.url('secret_value_resource_name', secret_value_resource_name, 'str', skip_quote=True)}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('self.api_version', self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers) # depends on [control=['if'], data=[]]
# Construct body
body_content = self._serialize.body(secret_value_resource_description, 'SecretValueResourceDescription')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
raise models.FabricErrorException(self._deserialize, response) # depends on [control=['if'], data=[]]
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SecretValueResourceDescription', response) # depends on [control=['if'], data=[]]
if response.status_code == 201:
deserialized = self._deserialize('SecretValueResourceDescription', response) # depends on [control=['if'], data=[]]
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response # depends on [control=['if'], data=[]]
return deserialized |
def calc_osc_accels(self, osc_freqs, osc_damping=0.05, tf=None):
"""Compute the pseudo-acceleration spectral response of an oscillator
with a specific frequency and damping.
Parameters
----------
osc_freq : float
Frequency of the oscillator (Hz).
osc_damping : float
Fractional damping of the oscillator (dec). For example, 0.05 for a
damping ratio of 5%.
tf : array_like, optional
Transfer function to be applied to motion prior calculation of the
oscillator response.
Returns
-------
spec_accels : :class:`numpy.ndarray`
Peak pseudo-spectral acceleration of the oscillator
"""
if tf is None:
tf = np.ones_like(self.freqs)
else:
tf = np.asarray(tf).astype(complex)
resp = np.array([
self.calc_peak(tf * self._calc_sdof_tf(of, osc_damping))
for of in osc_freqs
])
return resp | def function[calc_osc_accels, parameter[self, osc_freqs, osc_damping, tf]]:
constant[Compute the pseudo-acceleration spectral response of an oscillator
with a specific frequency and damping.
Parameters
----------
osc_freq : float
Frequency of the oscillator (Hz).
osc_damping : float
Fractional damping of the oscillator (dec). For example, 0.05 for a
damping ratio of 5%.
tf : array_like, optional
Transfer function to be applied to motion prior calculation of the
oscillator response.
Returns
-------
spec_accels : :class:`numpy.ndarray`
Peak pseudo-spectral acceleration of the oscillator
]
if compare[name[tf] is constant[None]] begin[:]
variable[tf] assign[=] call[name[np].ones_like, parameter[name[self].freqs]]
variable[resp] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c6aad40>]]
return[name[resp]] | keyword[def] identifier[calc_osc_accels] ( identifier[self] , identifier[osc_freqs] , identifier[osc_damping] = literal[int] , identifier[tf] = keyword[None] ):
literal[string]
keyword[if] identifier[tf] keyword[is] keyword[None] :
identifier[tf] = identifier[np] . identifier[ones_like] ( identifier[self] . identifier[freqs] )
keyword[else] :
identifier[tf] = identifier[np] . identifier[asarray] ( identifier[tf] ). identifier[astype] ( identifier[complex] )
identifier[resp] = identifier[np] . identifier[array] ([
identifier[self] . identifier[calc_peak] ( identifier[tf] * identifier[self] . identifier[_calc_sdof_tf] ( identifier[of] , identifier[osc_damping] ))
keyword[for] identifier[of] keyword[in] identifier[osc_freqs]
])
keyword[return] identifier[resp] | def calc_osc_accels(self, osc_freqs, osc_damping=0.05, tf=None):
"""Compute the pseudo-acceleration spectral response of an oscillator
with a specific frequency and damping.
Parameters
----------
osc_freq : float
Frequency of the oscillator (Hz).
osc_damping : float
Fractional damping of the oscillator (dec). For example, 0.05 for a
damping ratio of 5%.
tf : array_like, optional
Transfer function to be applied to motion prior calculation of the
oscillator response.
Returns
-------
spec_accels : :class:`numpy.ndarray`
Peak pseudo-spectral acceleration of the oscillator
"""
if tf is None:
tf = np.ones_like(self.freqs) # depends on [control=['if'], data=['tf']]
else:
tf = np.asarray(tf).astype(complex)
resp = np.array([self.calc_peak(tf * self._calc_sdof_tf(of, osc_damping)) for of in osc_freqs])
return resp |
def S2L(dd, mm, yy, timeZone=7):
    '''Convert a solar (Gregorian) date dd/mm/yyyy into the corresponding
    lunar date.

    Returns a list ``[day, month, year, leap]`` where ``leap`` is 1 when the
    resulting month is a leap month, 0 otherwise. ``timeZone`` is the UTC
    offset in hours (default 7, Indochina time).'''
    jd = jdFromDate(dd, mm, yy)
    # Index of the lunation containing (or just before) the given day.
    k = int((jd - 2415021.076998695) / 29.530588853)
    month_start = getNewMoonDay(k + 1, timeZone)
    if month_start > jd:
        month_start = getNewMoonDay(k, timeZone)
    a11 = getLunarMonth11(yy, timeZone)
    b11 = a11
    if a11 >= month_start:
        lunar_year = yy
        a11 = getLunarMonth11(yy - 1, timeZone)
    else:
        lunar_year = yy + 1
        b11 = getLunarMonth11(yy + 1, timeZone)
    lunar_day = jd - month_start + 1
    diff = int((month_start - a11) / 29.)
    lunar_leap = 0
    lunar_month = diff + 11
    # A lunar year containing 13 new moons has a leap month.
    if b11 - a11 > 365:
        leap_offset = getLeapMonthOffset(a11, timeZone)
        if diff >= leap_offset:
            lunar_month = diff + 10
            if diff == leap_offset:
                lunar_leap = 1
    if lunar_month > 12:
        lunar_month -= 12
    if lunar_month >= 11 and diff < 4:
        lunar_year -= 1
    return [lunar_day, lunar_month, lunar_year, lunar_leap]
constant[def S2L(dd, mm, yy, timeZone = 7): Convert solar date dd/mm/yyyy to
the corresponding lunar date.]
variable[dayNumber] assign[=] call[name[jdFromDate], parameter[name[dd], name[mm], name[yy]]]
variable[k] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[dayNumber] - constant[2415021.076998695]] / constant[29.530588853]]]]
variable[monthStart] assign[=] call[name[getNewMoonDay], parameter[binary_operation[name[k] + constant[1]], name[timeZone]]]
if compare[name[monthStart] greater[>] name[dayNumber]] begin[:]
variable[monthStart] assign[=] call[name[getNewMoonDay], parameter[name[k], name[timeZone]]]
variable[a11] assign[=] call[name[getLunarMonth11], parameter[name[yy], name[timeZone]]]
variable[b11] assign[=] name[a11]
if compare[name[a11] greater_or_equal[>=] name[monthStart]] begin[:]
variable[lunarYear] assign[=] name[yy]
variable[a11] assign[=] call[name[getLunarMonth11], parameter[binary_operation[name[yy] - constant[1]], name[timeZone]]]
variable[lunarDay] assign[=] binary_operation[binary_operation[name[dayNumber] - name[monthStart]] + constant[1]]
variable[diff] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[monthStart] - name[a11]] / constant[29.0]]]]
variable[lunarLeap] assign[=] constant[0]
variable[lunarMonth] assign[=] binary_operation[name[diff] + constant[11]]
if compare[binary_operation[name[b11] - name[a11]] greater[>] constant[365]] begin[:]
variable[leapMonthDiff] assign[=] call[name[getLeapMonthOffset], parameter[name[a11], name[timeZone]]]
if compare[name[diff] greater_or_equal[>=] name[leapMonthDiff]] begin[:]
variable[lunarMonth] assign[=] binary_operation[name[diff] + constant[10]]
if compare[name[diff] equal[==] name[leapMonthDiff]] begin[:]
variable[lunarLeap] assign[=] constant[1]
if compare[name[lunarMonth] greater[>] constant[12]] begin[:]
variable[lunarMonth] assign[=] binary_operation[name[lunarMonth] - constant[12]]
if <ast.BoolOp object at 0x7da18c4ceec0> begin[:]
<ast.AugAssign object at 0x7da18c4ce230>
return[list[[<ast.Name object at 0x7da18c4cdd50>, <ast.Name object at 0x7da18c4ccdf0>, <ast.Name object at 0x7da18c4cd7e0>, <ast.Name object at 0x7da18c4cff10>]]] | keyword[def] identifier[S2L] ( identifier[dd] , identifier[mm] , identifier[yy] , identifier[timeZone] = literal[int] ):
literal[string]
identifier[dayNumber] = identifier[jdFromDate] ( identifier[dd] , identifier[mm] , identifier[yy] )
identifier[k] = identifier[int] (( identifier[dayNumber] - literal[int] )/ literal[int] )
identifier[monthStart] = identifier[getNewMoonDay] ( identifier[k] + literal[int] , identifier[timeZone] )
keyword[if] ( identifier[monthStart] > identifier[dayNumber] ):
identifier[monthStart] = identifier[getNewMoonDay] ( identifier[k] , identifier[timeZone] )
identifier[a11] = identifier[getLunarMonth11] ( identifier[yy] , identifier[timeZone] )
identifier[b11] = identifier[a11]
keyword[if] ( identifier[a11] >= identifier[monthStart] ):
identifier[lunarYear] = identifier[yy]
identifier[a11] = identifier[getLunarMonth11] ( identifier[yy] - literal[int] , identifier[timeZone] )
keyword[else] :
identifier[lunarYear] = identifier[yy] + literal[int]
identifier[b11] = identifier[getLunarMonth11] ( identifier[yy] + literal[int] , identifier[timeZone] )
identifier[lunarDay] = identifier[dayNumber] - identifier[monthStart] + literal[int]
identifier[diff] = identifier[int] (( identifier[monthStart] - identifier[a11] )/ literal[int] )
identifier[lunarLeap] = literal[int]
identifier[lunarMonth] = identifier[diff] + literal[int]
keyword[if] ( identifier[b11] - identifier[a11] > literal[int] ):
identifier[leapMonthDiff] = identifier[getLeapMonthOffset] ( identifier[a11] , identifier[timeZone] )
keyword[if] ( identifier[diff] >= identifier[leapMonthDiff] ):
identifier[lunarMonth] = identifier[diff] + literal[int]
keyword[if] ( identifier[diff] == identifier[leapMonthDiff] ):
identifier[lunarLeap] = literal[int]
keyword[if] ( identifier[lunarMonth] > literal[int] ):
identifier[lunarMonth] = identifier[lunarMonth] - literal[int]
keyword[if] ( identifier[lunarMonth] >= literal[int] keyword[and] identifier[diff] < literal[int] ):
identifier[lunarYear] -= literal[int]
keyword[return] [ identifier[lunarDay] , identifier[lunarMonth] , identifier[lunarYear] , identifier[lunarLeap] ] | def S2L(dd, mm, yy, timeZone=7):
"""def S2L(dd, mm, yy, timeZone = 7): Convert solar date dd/mm/yyyy to
the corresponding lunar date."""
dayNumber = jdFromDate(dd, mm, yy)
k = int((dayNumber - 2415021.076998695) / 29.530588853)
monthStart = getNewMoonDay(k + 1, timeZone)
if monthStart > dayNumber:
monthStart = getNewMoonDay(k, timeZone) # depends on [control=['if'], data=['monthStart']]
# alert(dayNumber + " -> " + monthStart)
a11 = getLunarMonth11(yy, timeZone)
b11 = a11
if a11 >= monthStart:
lunarYear = yy
a11 = getLunarMonth11(yy - 1, timeZone) # depends on [control=['if'], data=['a11']]
else:
lunarYear = yy + 1
b11 = getLunarMonth11(yy + 1, timeZone)
lunarDay = dayNumber - monthStart + 1
diff = int((monthStart - a11) / 29.0)
lunarLeap = 0
lunarMonth = diff + 11
if b11 - a11 > 365:
leapMonthDiff = getLeapMonthOffset(a11, timeZone)
if diff >= leapMonthDiff:
lunarMonth = diff + 10
if diff == leapMonthDiff:
lunarLeap = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['diff', 'leapMonthDiff']] # depends on [control=['if'], data=[]]
if lunarMonth > 12:
lunarMonth = lunarMonth - 12 # depends on [control=['if'], data=['lunarMonth']]
if lunarMonth >= 11 and diff < 4:
lunarYear -= 1 # depends on [control=['if'], data=[]]
# print [lunarDay, lunarMonth, lunarYear, lunarLeap]
return [lunarDay, lunarMonth, lunarYear, lunarLeap] |
def _contains_yieldpoint(children):
"""Returns True if ``children`` contains any YieldPoints.
``children`` may be a dict or a list, as used by `MultiYieldPoint`
and `multi_future`.
"""
if isinstance(children, dict):
return any(isinstance(i, YieldPoint) for i in children.values())
if isinstance(children, list):
return any(isinstance(i, YieldPoint) for i in children)
return False | def function[_contains_yieldpoint, parameter[children]]:
constant[Returns True if ``children`` contains any YieldPoints.
``children`` may be a dict or a list, as used by `MultiYieldPoint`
and `multi_future`.
]
if call[name[isinstance], parameter[name[children], name[dict]]] begin[:]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1a5ef80>]]]
if call[name[isinstance], parameter[name[children], name[list]]] begin[:]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1ba9270>]]]
return[constant[False]] | keyword[def] identifier[_contains_yieldpoint] ( identifier[children] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[children] , identifier[dict] ):
keyword[return] identifier[any] ( identifier[isinstance] ( identifier[i] , identifier[YieldPoint] ) keyword[for] identifier[i] keyword[in] identifier[children] . identifier[values] ())
keyword[if] identifier[isinstance] ( identifier[children] , identifier[list] ):
keyword[return] identifier[any] ( identifier[isinstance] ( identifier[i] , identifier[YieldPoint] ) keyword[for] identifier[i] keyword[in] identifier[children] )
keyword[return] keyword[False] | def _contains_yieldpoint(children):
"""Returns True if ``children`` contains any YieldPoints.
``children`` may be a dict or a list, as used by `MultiYieldPoint`
and `multi_future`.
"""
if isinstance(children, dict):
return any((isinstance(i, YieldPoint) for i in children.values())) # depends on [control=['if'], data=[]]
if isinstance(children, list):
return any((isinstance(i, YieldPoint) for i in children)) # depends on [control=['if'], data=[]]
return False |
def crawl(self, *args, **kwargs):
        '''Run the crawl, block until it finishes, and return the result data.

        Because Scrapy runs crawls concurrently inside its own reactor, data
        is exchanged through a temporary file: the file path is handed to the
        spider (``output_file``), and once crawling completes the file is read
        back and JSON-deserialized.

        :return: dictionary object conforming to the interface definition
        :rtype: dict
        '''
        temp = tempfile.NamedTemporaryFile(mode='w+t')
        try:
            crawler = CrawlerScript()
            # For debugging, an explicit date argument may be passed,
            # e.g. date='20180423'
            crawler.crawl(output_file=temp.name, *args, **kwargs)
            temp.seek(0)
            # NOTE: json.loads() no longer accepts an ``encoding`` keyword
            # (deprecated since 3.1, removed in Python 3.9); the file is
            # opened in text mode, so the content is already decoded.
            content = json.loads(temp.read())
        finally:
            temp.close()
        print('抓取完毕!')
        return content
constant[
执行爬取操作,并阻塞直到爬取完成,返回结果数据。
此处考虑到 Scrapy 本身的并发特性,故通过临时文件方式做数据传递,
将临时路径传递到爬虫业务中,并在爬取结束后对文件进行读取、 JSON 反序列化,返回
:return: 返回符合接口定义的字典对象
:rtype: dict
]
variable[temp] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]]
<ast.Try object at 0x7da2044c3490>
call[name[print], parameter[constant[抓取完毕!]]]
return[name[content]] | keyword[def] identifier[crawl] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[temp] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[mode] = literal[string] )
keyword[try] :
identifier[crawler] = identifier[CrawlerScript] ()
identifier[crawler] . identifier[crawl] ( identifier[output_file] = identifier[temp] . identifier[name] ,* identifier[args] ,** identifier[kwargs] )
identifier[temp] . identifier[seek] ( literal[int] )
identifier[content] = identifier[json] . identifier[loads] ( identifier[temp] . identifier[read] (), identifier[encoding] = literal[string] )
keyword[finally] :
identifier[temp] . identifier[close] ()
identifier[print] ( literal[string] )
keyword[return] identifier[content] | def crawl(self, *args, **kwargs):
"""
执行爬取操作,并阻塞直到爬取完成,返回结果数据。
此处考虑到 Scrapy 本身的并发特性,故通过临时文件方式做数据传递,
将临时路径传递到爬虫业务中,并在爬取结束后对文件进行读取、 JSON 反序列化,返回
:return: 返回符合接口定义的字典对象
:rtype: dict
"""
temp = tempfile.NamedTemporaryFile(mode='w+t')
try:
crawler = CrawlerScript()
# 调试时可指定明确日期参数,如:date='20180423'
crawler.crawl(*args, output_file=temp.name, **kwargs)
temp.seek(0)
content = json.loads(temp.read(), encoding='UTF-8') # depends on [control=['try'], data=[]]
finally:
temp.close()
print('抓取完毕!')
return content |
def FromBinary(cls, record_data, record_count=1):
        """Create an UpdateRecord subclass from binary record data.

        This should be called with a binary record blob (NOT including the
        record type header) and it will decode it into a ReflashControllerRecord.

        Args:
            record_data (bytearray): The raw record data that we wish to parse
                into an UpdateRecord subclass NOT including its 8 byte record header.
            record_count (int): The number of records included in record_data.

        Raises:
            ArgumentError: If the record_data is malformed and cannot be parsed.

        Returns:
            ReflashControllerRecord: The decoded reflash tile record.
        """
        header_length = ReflashControllerRecord.RecordHeaderLength
        if len(record_data) < header_length:
            raise ArgumentError("Record was too short to contain a full reflash record header",
                                length=len(record_data), header_length=header_length)

        # Header layout: two little-endian uint32s (offset, payload length).
        offset, data_length = struct.unpack_from("<LL", record_data)

        bindata = record_data[header_length:]
        if data_length != len(bindata):
            raise ArgumentError("Embedded firmware length did not agree with actual length of embeded data",
                                length=len(bindata), embedded_length=data_length)

        return ReflashControllerRecord(bindata, offset)
constant[Create an UpdateRecord subclass from binary record data.
This should be called with a binary record blob (NOT including the
record type header) and it will decode it into a ReflashControllerRecord.
Args:
record_data (bytearray): The raw record data that we wish to parse
into an UpdateRecord subclass NOT including its 8 byte record header.
record_count (int): The number of records included in record_data.
Raises:
ArgumentError: If the record_data is malformed and cannot be parsed.
Returns:
ReflashControllerRecord: The decoded reflash tile record.
]
if compare[call[name[len], parameter[name[record_data]]] less[<] name[ReflashControllerRecord].RecordHeaderLength] begin[:]
<ast.Raise object at 0x7da2046227d0>
<ast.Tuple object at 0x7da204623460> assign[=] call[name[struct].unpack_from, parameter[constant[<LL], name[record_data]]]
variable[bindata] assign[=] call[name[record_data]][<ast.Slice object at 0x7da2046223b0>]
if compare[call[name[len], parameter[name[bindata]]] not_equal[!=] name[data_length]] begin[:]
<ast.Raise object at 0x7da2046220e0>
return[call[name[ReflashControllerRecord], parameter[name[bindata], name[offset]]]] | keyword[def] identifier[FromBinary] ( identifier[cls] , identifier[record_data] , identifier[record_count] = literal[int] ):
literal[string]
keyword[if] identifier[len] ( identifier[record_data] )< identifier[ReflashControllerRecord] . identifier[RecordHeaderLength] :
keyword[raise] identifier[ArgumentError] ( literal[string] ,
identifier[length] = identifier[len] ( identifier[record_data] ), identifier[header_length] = identifier[ReflashControllerRecord] . identifier[RecordHeaderLength] )
identifier[offset] , identifier[data_length] = identifier[struct] . identifier[unpack_from] ( literal[string] , identifier[record_data] )
identifier[bindata] = identifier[record_data] [ identifier[ReflashControllerRecord] . identifier[RecordHeaderLength] :]
keyword[if] identifier[len] ( identifier[bindata] )!= identifier[data_length] :
keyword[raise] identifier[ArgumentError] ( literal[string] ,
identifier[length] = identifier[len] ( identifier[bindata] ), identifier[embedded_length] = identifier[data_length] )
keyword[return] identifier[ReflashControllerRecord] ( identifier[bindata] , identifier[offset] ) | def FromBinary(cls, record_data, record_count=1):
"""Create an UpdateRecord subclass from binary record data.
This should be called with a binary record blob (NOT including the
record type header) and it will decode it into a ReflashControllerRecord.
Args:
record_data (bytearray): The raw record data that we wish to parse
into an UpdateRecord subclass NOT including its 8 byte record header.
record_count (int): The number of records included in record_data.
Raises:
ArgumentError: If the record_data is malformed and cannot be parsed.
Returns:
ReflashControllerRecord: The decoded reflash tile record.
"""
if len(record_data) < ReflashControllerRecord.RecordHeaderLength:
raise ArgumentError('Record was too short to contain a full reflash record header', length=len(record_data), header_length=ReflashControllerRecord.RecordHeaderLength) # depends on [control=['if'], data=[]]
(offset, data_length) = struct.unpack_from('<LL', record_data)
bindata = record_data[ReflashControllerRecord.RecordHeaderLength:]
if len(bindata) != data_length:
raise ArgumentError('Embedded firmware length did not agree with actual length of embeded data', length=len(bindata), embedded_length=data_length) # depends on [control=['if'], data=['data_length']]
return ReflashControllerRecord(bindata, offset) |
def func_call_as_str(name, *args, **kwds):
    """
    Return arguments and keyword arguments as formatted string

    >>> func_call_as_str('f', 1, 2, a=1)
    'f(1, 2, a=1)'
    """
    # Positional arguments rendered via repr(), then keyword arguments
    # in sorted-key order as key=repr(value).
    pieces = [repr(arg) for arg in args]
    pieces.extend('{0!s}={1!r}'.format(key, value)
                  for key, value in sorted(kwds.items()))
    return '{0}({1})'.format(name, ', '.join(pieces))
constant[
Return arguments and keyword arguments as formatted string
>>> func_call_as_str('f', 1, 2, a=1)
'f(1, 2, a=1)'
]
return[call[constant[{0}({1})].format, parameter[name[name], call[constant[, ].join, parameter[call[name[itertools].chain, parameter[call[name[map], parameter[constant[{0!r}].format, name[args]]], call[name[map], parameter[constant[{0[0]!s}={0[1]!r}].format, call[name[sorted], parameter[call[name[kwds].items, parameter[]]]]]]]]]]]]] | keyword[def] identifier[func_call_as_str] ( identifier[name] ,* identifier[args] ,** identifier[kwds] ):
literal[string]
keyword[return] literal[string] . identifier[format] (
identifier[name] ,
literal[string] . identifier[join] ( identifier[itertools] . identifier[chain] (
identifier[map] ( literal[string] . identifier[format] , identifier[args] ),
identifier[map] ( literal[string] . identifier[format] , identifier[sorted] ( identifier[kwds] . identifier[items] ()))))) | def func_call_as_str(name, *args, **kwds):
"""
Return arguments and keyword arguments as formatted string
>>> func_call_as_str('f', 1, 2, a=1)
'f(1, 2, a=1)'
"""
return '{0}({1})'.format(name, ', '.join(itertools.chain(map('{0!r}'.format, args), map('{0[0]!s}={0[1]!r}'.format, sorted(kwds.items()))))) |
def show_relations(self):
        '''display every relation in the database as (src, relation, dst)'''
        for src in self.iter_nodes():
            for relation, targets in src.outgoing.items():
                for dst in targets:
                    print(repr(src.obj), '-', relation, '-', repr(dst.obj))
constant[ display every relation in the database as (src, relation, dst) ]
for taget[name[src_node]] in starred[call[name[self].iter_nodes, parameter[]]] begin[:]
for taget[name[relation]] in starred[name[src_node].outgoing] begin[:]
for taget[name[dst_node]] in starred[call[name[src_node].outgoing][name[relation]]] begin[:]
call[name[print], parameter[call[name[repr], parameter[name[src_node].obj]], constant[-], name[relation], constant[-], call[name[repr], parameter[name[dst_node].obj]]]] | keyword[def] identifier[show_relations] ( identifier[self] ):
literal[string]
keyword[for] identifier[src_node] keyword[in] identifier[self] . identifier[iter_nodes] ():
keyword[for] identifier[relation] keyword[in] identifier[src_node] . identifier[outgoing] :
keyword[for] identifier[dst_node] keyword[in] identifier[src_node] . identifier[outgoing] [ identifier[relation] ]:
identifier[print] ( identifier[repr] ( identifier[src_node] . identifier[obj] ), literal[string] , identifier[relation] , literal[string] , identifier[repr] ( identifier[dst_node] . identifier[obj] )) | def show_relations(self):
""" display every relation in the database as (src, relation, dst) """
for src_node in self.iter_nodes():
for relation in src_node.outgoing:
for dst_node in src_node.outgoing[relation]:
print(repr(src_node.obj), '-', relation, '-', repr(dst_node.obj)) # depends on [control=['for'], data=['dst_node']] # depends on [control=['for'], data=['relation']] # depends on [control=['for'], data=['src_node']] |
def get_users_of_group(self, group_id, **kwargs):  # noqa: E501
        """Get users of a group.  # noqa: E501

        An endpoint for listing the users of a group with details. **Example usage:** `curl https://api.us-east-1.mbedcloud.com/v3/policy-groups/{group-id}/users -H 'Authorization: Bearer API_KEY'`  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass asynchronous=True
        >>> thread = api.get_users_of_group(group_id, asynchronous=True)
        >>> result = thread.get()

        :param asynchronous bool
        :param str group_id: The ID of the group whose users are retrieved. (required)
        :param int limit: The number of results to return (2-1000), default is 50.
        :param str after: The entity ID to fetch after the given one.
        :param str order: The order of the records based on creation time, ASC or DESC; by default ASC
        :param str include: Comma separated additional data to return. Currently supported: total_count
        :param str status__eq: An optional filter for getting users by status.
        :param str status__in: An optional filter for getting users with a specified set of statuses.
        :param str status__nin: An optional filter for excluding users with a specified set of statuses.
        :return: UserInfoRespList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        # Both the synchronous and asynchronous paths make the identical call
        # (the ``asynchronous`` flag is forwarded inside **kwargs and the
        # *_with_http_info variant decides whether to return data or a request
        # thread), so the original duplicated if/else is collapsed into one call.
        return self.get_users_of_group_with_http_info(group_id, **kwargs)  # noqa: E501
constant[Get users of a group. # noqa: E501
An endpoint for listing the users of a group with details. **Example usage:** `curl https://api.us-east-1.mbedcloud.com/v3/policy-groups/{group-id}/users -H 'Authorization: Bearer API_KEY'` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.get_users_of_group(group_id, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str group_id: The ID of the group whose users are retrieved. (required)
:param int limit: The number of results to return (2-1000), default is 50.
:param str after: The entity ID to fetch after the given one.
:param str order: The order of the records based on creation time, ASC or DESC; by default ASC
:param str include: Comma separated additional data to return. Currently supported: total_count
:param str status__eq: An optional filter for getting users by status.
:param str status__in: An optional filter for getting users with a specified set of statuses.
:param str status__nin: An optional filter for excluding users with a specified set of statuses.
:return: UserInfoRespList
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].get_users_of_group_with_http_info, parameter[name[group_id]]]] | keyword[def] identifier[get_users_of_group] ( identifier[self] , identifier[group_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get_users_of_group_with_http_info] ( identifier[group_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[get_users_of_group_with_http_info] ( identifier[group_id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def get_users_of_group(self, group_id, **kwargs): # noqa: E501
"Get users of a group. # noqa: E501\n\n An endpoint for listing the users of a group with details. **Example usage:** `curl https://api.us-east-1.mbedcloud.com/v3/policy-groups/{group-id}/users -H 'Authorization: Bearer API_KEY'` # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.get_users_of_group(group_id, asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str group_id: The ID of the group whose users are retrieved. (required)\n :param int limit: The number of results to return (2-1000), default is 50.\n :param str after: The entity ID to fetch after the given one.\n :param str order: The order of the records based on creation time, ASC or DESC; by default ASC\n :param str include: Comma separated additional data to return. Currently supported: total_count\n :param str status__eq: An optional filter for getting users by status.\n :param str status__in: An optional filter for getting users with a specified set of statuses.\n :param str status__nin: An optional filter for excluding users with a specified set of statuses.\n :return: UserInfoRespList\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.get_users_of_group_with_http_info(group_id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.get_users_of_group_with_http_info(group_id, **kwargs) # noqa: E501
return data |
def _encode(self, obj):
"""Returns a JSON representation of a Python object - see dumps.
Accepts objects of any type, calls the appropriate type-specific encoder.
"""
if self._use_hook:
obj = self.encode_hook(obj)
# first try simple strict checks
_objtype = obj.__class__
if _objtype is str:
return self._encode_str(obj)
if _objtype is bool:
if obj:
return 'true'
else:
return 'false'
if _objtype is int or _objtype is float:
return self._encode_numbers(obj)
if _objtype is list or _objtype is tuple:
return self._encode_list(obj)
if obj is None:
return 'null'
if _objtype is dict or obj is OrderedDict:
return self._encode_dict(obj)
if _objtype is UUID:
return '"' + str(obj) + '"'
if _objtype is Decimal:
return '"' + str(obj) + '"'
# For all non-std types try __mm_json__ and then __mm_serialize__ before any isinstance
# checks
try:
sx_json_data = obj.__mm_json__
except AttributeError:
pass
else:
try:
data = sx_json_data()
except NotImplementedError:
pass
else:
if isinstance(data, bytes):
return data.decode('utf-8')
else:
return self._encode_str(data, escape_quotes=False)
try:
sx_encoder = obj.__mm_serialize__
except AttributeError:
pass
else:
try:
data = sx_encoder()
except NotImplementedError:
pass
else:
return self._encode(data)
# do more in-depth class analysis
if isinstance(obj, UUID):
return '"' + str(obj) + '"'
if isinstance(obj, str):
return self._encode_str(obj)
if isinstance(obj, (list, tuple, set, frozenset, Set)):
return self._encode_list(obj)
if isinstance(obj, Sequence) and not isinstance(obj, (bytes, bytearray)):
return self._encode_list(obj)
if isinstance(obj, (dict, OrderedDict, Mapping)):
return self._encode_dict(obj)
# note: number checks using isinstance should come after True/False checks
if isinstance(obj, Number):
return self._encode_numbers(obj)
if isinstance(obj, (date, time)):
return '"' + obj.isoformat() + '"'
return self._encode(self.default(obj)) | def function[_encode, parameter[self, obj]]:
constant[Returns a JSON representation of a Python object - see dumps.
Accepts objects of any type, calls the appropriate type-specific encoder.
]
if name[self]._use_hook begin[:]
variable[obj] assign[=] call[name[self].encode_hook, parameter[name[obj]]]
variable[_objtype] assign[=] name[obj].__class__
if compare[name[_objtype] is name[str]] begin[:]
return[call[name[self]._encode_str, parameter[name[obj]]]]
if compare[name[_objtype] is name[bool]] begin[:]
if name[obj] begin[:]
return[constant[true]]
if <ast.BoolOp object at 0x7da1b0a378e0> begin[:]
return[call[name[self]._encode_numbers, parameter[name[obj]]]]
if <ast.BoolOp object at 0x7da1b0a35bd0> begin[:]
return[call[name[self]._encode_list, parameter[name[obj]]]]
if compare[name[obj] is constant[None]] begin[:]
return[constant[null]]
if <ast.BoolOp object at 0x7da1b0a35030> begin[:]
return[call[name[self]._encode_dict, parameter[name[obj]]]]
if compare[name[_objtype] is name[UUID]] begin[:]
return[binary_operation[binary_operation[constant["] + call[name[str], parameter[name[obj]]]] + constant["]]]
if compare[name[_objtype] is name[Decimal]] begin[:]
return[binary_operation[binary_operation[constant["] + call[name[str], parameter[name[obj]]]] + constant["]]]
<ast.Try object at 0x7da1b0a37190>
<ast.Try object at 0x7da1b0a373d0>
if call[name[isinstance], parameter[name[obj], name[UUID]]] begin[:]
return[binary_operation[binary_operation[constant["] + call[name[str], parameter[name[obj]]]] + constant["]]]
if call[name[isinstance], parameter[name[obj], name[str]]] begin[:]
return[call[name[self]._encode_str, parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da1b0a34460>, <ast.Name object at 0x7da1b0a35e70>, <ast.Name object at 0x7da1b0a37760>, <ast.Name object at 0x7da1b0a35a80>, <ast.Name object at 0x7da1b0a34e50>]]]] begin[:]
return[call[name[self]._encode_list, parameter[name[obj]]]]
if <ast.BoolOp object at 0x7da1b0a37d60> begin[:]
return[call[name[self]._encode_list, parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da20e9623e0>, <ast.Name object at 0x7da20e962770>, <ast.Name object at 0x7da20e963160>]]]] begin[:]
return[call[name[self]._encode_dict, parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], name[Number]]] begin[:]
return[call[name[self]._encode_numbers, parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da20e960430>, <ast.Name object at 0x7da20e961d20>]]]] begin[:]
return[binary_operation[binary_operation[constant["] + call[name[obj].isoformat, parameter[]]] + constant["]]]
return[call[name[self]._encode, parameter[call[name[self].default, parameter[name[obj]]]]]] | keyword[def] identifier[_encode] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] identifier[self] . identifier[_use_hook] :
identifier[obj] = identifier[self] . identifier[encode_hook] ( identifier[obj] )
identifier[_objtype] = identifier[obj] . identifier[__class__]
keyword[if] identifier[_objtype] keyword[is] identifier[str] :
keyword[return] identifier[self] . identifier[_encode_str] ( identifier[obj] )
keyword[if] identifier[_objtype] keyword[is] identifier[bool] :
keyword[if] identifier[obj] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string]
keyword[if] identifier[_objtype] keyword[is] identifier[int] keyword[or] identifier[_objtype] keyword[is] identifier[float] :
keyword[return] identifier[self] . identifier[_encode_numbers] ( identifier[obj] )
keyword[if] identifier[_objtype] keyword[is] identifier[list] keyword[or] identifier[_objtype] keyword[is] identifier[tuple] :
keyword[return] identifier[self] . identifier[_encode_list] ( identifier[obj] )
keyword[if] identifier[obj] keyword[is] keyword[None] :
keyword[return] literal[string]
keyword[if] identifier[_objtype] keyword[is] identifier[dict] keyword[or] identifier[obj] keyword[is] identifier[OrderedDict] :
keyword[return] identifier[self] . identifier[_encode_dict] ( identifier[obj] )
keyword[if] identifier[_objtype] keyword[is] identifier[UUID] :
keyword[return] literal[string] + identifier[str] ( identifier[obj] )+ literal[string]
keyword[if] identifier[_objtype] keyword[is] identifier[Decimal] :
keyword[return] literal[string] + identifier[str] ( identifier[obj] )+ literal[string]
keyword[try] :
identifier[sx_json_data] = identifier[obj] . identifier[__mm_json__]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
keyword[try] :
identifier[data] = identifier[sx_json_data] ()
keyword[except] identifier[NotImplementedError] :
keyword[pass]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[data] , identifier[bytes] ):
keyword[return] identifier[data] . identifier[decode] ( literal[string] )
keyword[else] :
keyword[return] identifier[self] . identifier[_encode_str] ( identifier[data] , identifier[escape_quotes] = keyword[False] )
keyword[try] :
identifier[sx_encoder] = identifier[obj] . identifier[__mm_serialize__]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
keyword[try] :
identifier[data] = identifier[sx_encoder] ()
keyword[except] identifier[NotImplementedError] :
keyword[pass]
keyword[else] :
keyword[return] identifier[self] . identifier[_encode] ( identifier[data] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[UUID] ):
keyword[return] literal[string] + identifier[str] ( identifier[obj] )+ literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[str] ):
keyword[return] identifier[self] . identifier[_encode_str] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[list] , identifier[tuple] , identifier[set] , identifier[frozenset] , identifier[Set] )):
keyword[return] identifier[self] . identifier[_encode_list] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Sequence] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[obj] ,( identifier[bytes] , identifier[bytearray] )):
keyword[return] identifier[self] . identifier[_encode_list] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[dict] , identifier[OrderedDict] , identifier[Mapping] )):
keyword[return] identifier[self] . identifier[_encode_dict] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Number] ):
keyword[return] identifier[self] . identifier[_encode_numbers] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[date] , identifier[time] )):
keyword[return] literal[string] + identifier[obj] . identifier[isoformat] ()+ literal[string]
keyword[return] identifier[self] . identifier[_encode] ( identifier[self] . identifier[default] ( identifier[obj] )) | def _encode(self, obj):
"""Returns a JSON representation of a Python object - see dumps.
Accepts objects of any type, calls the appropriate type-specific encoder.
"""
if self._use_hook:
obj = self.encode_hook(obj) # depends on [control=['if'], data=[]]
# first try simple strict checks
_objtype = obj.__class__
if _objtype is str:
return self._encode_str(obj) # depends on [control=['if'], data=[]]
if _objtype is bool:
if obj:
return 'true' # depends on [control=['if'], data=[]]
else:
return 'false' # depends on [control=['if'], data=[]]
if _objtype is int or _objtype is float:
return self._encode_numbers(obj) # depends on [control=['if'], data=[]]
if _objtype is list or _objtype is tuple:
return self._encode_list(obj) # depends on [control=['if'], data=[]]
if obj is None:
return 'null' # depends on [control=['if'], data=[]]
if _objtype is dict or obj is OrderedDict:
return self._encode_dict(obj) # depends on [control=['if'], data=[]]
if _objtype is UUID:
return '"' + str(obj) + '"' # depends on [control=['if'], data=[]]
if _objtype is Decimal:
return '"' + str(obj) + '"' # depends on [control=['if'], data=[]]
# For all non-std types try __mm_json__ and then __mm_serialize__ before any isinstance
# checks
try:
sx_json_data = obj.__mm_json__ # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
else:
try:
data = sx_json_data() # depends on [control=['try'], data=[]]
except NotImplementedError:
pass # depends on [control=['except'], data=[]]
else:
if isinstance(data, bytes):
return data.decode('utf-8') # depends on [control=['if'], data=[]]
else:
return self._encode_str(data, escape_quotes=False)
try:
sx_encoder = obj.__mm_serialize__ # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
else:
try:
data = sx_encoder() # depends on [control=['try'], data=[]]
except NotImplementedError:
pass # depends on [control=['except'], data=[]]
else:
return self._encode(data)
# do more in-depth class analysis
if isinstance(obj, UUID):
return '"' + str(obj) + '"' # depends on [control=['if'], data=[]]
if isinstance(obj, str):
return self._encode_str(obj) # depends on [control=['if'], data=[]]
if isinstance(obj, (list, tuple, set, frozenset, Set)):
return self._encode_list(obj) # depends on [control=['if'], data=[]]
if isinstance(obj, Sequence) and (not isinstance(obj, (bytes, bytearray))):
return self._encode_list(obj) # depends on [control=['if'], data=[]]
if isinstance(obj, (dict, OrderedDict, Mapping)):
return self._encode_dict(obj) # depends on [control=['if'], data=[]]
# note: number checks using isinstance should come after True/False checks
if isinstance(obj, Number):
return self._encode_numbers(obj) # depends on [control=['if'], data=[]]
if isinstance(obj, (date, time)):
return '"' + obj.isoformat() + '"' # depends on [control=['if'], data=[]]
return self._encode(self.default(obj)) |
def has_readable_server(self, read_preference=ReadPreference.PRIMARY):
"""Does this topology have any readable servers available matching the
given read preference?
:Parameters:
- `read_preference`: an instance of a read preference from
:mod:`~pymongo.read_preferences`. Defaults to
:attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
.. note:: When connected directly to a single server this method
always returns ``True``.
.. versionadded:: 3.4
"""
common.validate_read_preference("read_preference", read_preference)
return any(self.apply_selector(read_preference, None)) | def function[has_readable_server, parameter[self, read_preference]]:
constant[Does this topology have any readable servers available matching the
given read preference?
:Parameters:
- `read_preference`: an instance of a read preference from
:mod:`~pymongo.read_preferences`. Defaults to
:attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
.. note:: When connected directly to a single server this method
always returns ``True``.
.. versionadded:: 3.4
]
call[name[common].validate_read_preference, parameter[constant[read_preference], name[read_preference]]]
return[call[name[any], parameter[call[name[self].apply_selector, parameter[name[read_preference], constant[None]]]]]] | keyword[def] identifier[has_readable_server] ( identifier[self] , identifier[read_preference] = identifier[ReadPreference] . identifier[PRIMARY] ):
literal[string]
identifier[common] . identifier[validate_read_preference] ( literal[string] , identifier[read_preference] )
keyword[return] identifier[any] ( identifier[self] . identifier[apply_selector] ( identifier[read_preference] , keyword[None] )) | def has_readable_server(self, read_preference=ReadPreference.PRIMARY):
"""Does this topology have any readable servers available matching the
given read preference?
:Parameters:
- `read_preference`: an instance of a read preference from
:mod:`~pymongo.read_preferences`. Defaults to
:attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
.. note:: When connected directly to a single server this method
always returns ``True``.
.. versionadded:: 3.4
"""
common.validate_read_preference('read_preference', read_preference)
return any(self.apply_selector(read_preference, None)) |
def getLeapSecondLastUpdated(): # @NoSelf
"""
Shows the latest date a leap second was added to the leap second table.
"""
print('Leap second last updated:', str(CDFepoch.LTS[-1][0]) + '-' +
str(CDFepoch.LTS[-1][1]) + '-' + str(CDFepoch.LTS[-1][2])) | def function[getLeapSecondLastUpdated, parameter[]]:
constant[
Shows the latest date a leap second was added to the leap second table.
]
call[name[print], parameter[constant[Leap second last updated:], binary_operation[binary_operation[binary_operation[binary_operation[call[name[str], parameter[call[call[name[CDFepoch].LTS][<ast.UnaryOp object at 0x7da1b068d390>]][constant[0]]]] + constant[-]] + call[name[str], parameter[call[call[name[CDFepoch].LTS][<ast.UnaryOp object at 0x7da1b068d180>]][constant[1]]]]] + constant[-]] + call[name[str], parameter[call[call[name[CDFepoch].LTS][<ast.UnaryOp object at 0x7da1b069c220>]][constant[2]]]]]]] | keyword[def] identifier[getLeapSecondLastUpdated] ():
literal[string]
identifier[print] ( literal[string] , identifier[str] ( identifier[CDFepoch] . identifier[LTS] [- literal[int] ][ literal[int] ])+ literal[string] +
identifier[str] ( identifier[CDFepoch] . identifier[LTS] [- literal[int] ][ literal[int] ])+ literal[string] + identifier[str] ( identifier[CDFepoch] . identifier[LTS] [- literal[int] ][ literal[int] ])) | def getLeapSecondLastUpdated(): # @NoSelf
'\n Shows the latest date a leap second was added to the leap second table.\n '
print('Leap second last updated:', str(CDFepoch.LTS[-1][0]) + '-' + str(CDFepoch.LTS[-1][1]) + '-' + str(CDFepoch.LTS[-1][2])) |
def qname(self, uri: str) -> str:
''' Returns qname of uri in rdflib graph while also saving it '''
try:
prefix, namespace, name = self.g.compute_qname(uri)
qname = prefix + ':' + name
return qname
except:
try:
print('prefix:', prefix)
print('namespace:', namespace)
print('name:', name)
except:
print('Could not print from compute_qname')
exit('No qname for ' + uri) | def function[qname, parameter[self, uri]]:
constant[ Returns qname of uri in rdflib graph while also saving it ]
<ast.Try object at 0x7da1b1aa49d0> | keyword[def] identifier[qname] ( identifier[self] , identifier[uri] : identifier[str] )-> identifier[str] :
literal[string]
keyword[try] :
identifier[prefix] , identifier[namespace] , identifier[name] = identifier[self] . identifier[g] . identifier[compute_qname] ( identifier[uri] )
identifier[qname] = identifier[prefix] + literal[string] + identifier[name]
keyword[return] identifier[qname]
keyword[except] :
keyword[try] :
identifier[print] ( literal[string] , identifier[prefix] )
identifier[print] ( literal[string] , identifier[namespace] )
identifier[print] ( literal[string] , identifier[name] )
keyword[except] :
identifier[print] ( literal[string] )
identifier[exit] ( literal[string] + identifier[uri] ) | def qname(self, uri: str) -> str:
""" Returns qname of uri in rdflib graph while also saving it """
try:
(prefix, namespace, name) = self.g.compute_qname(uri)
qname = prefix + ':' + name
return qname # depends on [control=['try'], data=[]]
except:
try:
print('prefix:', prefix)
print('namespace:', namespace)
print('name:', name) # depends on [control=['try'], data=[]]
except:
print('Could not print from compute_qname') # depends on [control=['except'], data=[]]
exit('No qname for ' + uri) # depends on [control=['except'], data=[]] |
def _fill(self):
"""Advance the iterator without returning the old head."""
try:
self._head = self._iterable.next()
except StopIteration:
self._head = None | def function[_fill, parameter[self]]:
constant[Advance the iterator without returning the old head.]
<ast.Try object at 0x7da18f09ffa0> | keyword[def] identifier[_fill] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[_head] = identifier[self] . identifier[_iterable] . identifier[next] ()
keyword[except] identifier[StopIteration] :
identifier[self] . identifier[_head] = keyword[None] | def _fill(self):
"""Advance the iterator without returning the old head."""
try:
self._head = self._iterable.next() # depends on [control=['try'], data=[]]
except StopIteration:
self._head = None # depends on [control=['except'], data=[]] |
def friendly_type_name(raw_type: typing.Type) -> str:
"""
Returns a user-friendly type name
:param raw_type: raw type (str, int, ...)
:return: user friendly type as string
"""
try:
return _TRANSLATE_TYPE[raw_type]
except KeyError:
LOGGER.error('unmanaged value type: %s', raw_type)
return str(raw_type) | def function[friendly_type_name, parameter[raw_type]]:
constant[
Returns a user-friendly type name
:param raw_type: raw type (str, int, ...)
:return: user friendly type as string
]
<ast.Try object at 0x7da18bc729b0> | keyword[def] identifier[friendly_type_name] ( identifier[raw_type] : identifier[typing] . identifier[Type] )-> identifier[str] :
literal[string]
keyword[try] :
keyword[return] identifier[_TRANSLATE_TYPE] [ identifier[raw_type] ]
keyword[except] identifier[KeyError] :
identifier[LOGGER] . identifier[error] ( literal[string] , identifier[raw_type] )
keyword[return] identifier[str] ( identifier[raw_type] ) | def friendly_type_name(raw_type: typing.Type) -> str:
"""
Returns a user-friendly type name
:param raw_type: raw type (str, int, ...)
:return: user friendly type as string
"""
try:
return _TRANSLATE_TYPE[raw_type] # depends on [control=['try'], data=[]]
except KeyError:
LOGGER.error('unmanaged value type: %s', raw_type)
return str(raw_type) # depends on [control=['except'], data=[]] |
def copy_file_tree(src: str,
dest: str,
ctx: Optional[Dict[str, Any]] = None,
option_locations: Optional[List[Tuple[str, List[str]]]] = None):
"""
Copy the file tree under the :param:`src` directory to the :param:`dest`
directory. Pass :param:`ctx` to support rendering the files, and pass
:param:`option_locations` to support deleting optional files/folders.
"""
if os.path.exists(dest):
shutil.rmtree(dest, ignore_errors=True)
shutil.copytree(src, dest)
if option_locations:
for option, paths in option_locations:
for path in paths:
path = os.path.join(dest, path)
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path, ignore_errors=True)
if 'app_bundle_module_name' in ctx:
shutil.move(os.path.join(dest, 'app'),
os.path.join(dest, ctx['app_bundle_module_name']))
shutil.move(os.path.join(dest, 'tests', 'app'),
os.path.join(dest, 'tests', ctx['app_bundle_module_name']))
_render_file_tree(dest, ctx) | def function[copy_file_tree, parameter[src, dest, ctx, option_locations]]:
constant[
Copy the file tree under the :param:`src` directory to the :param:`dest`
directory. Pass :param:`ctx` to support rendering the files, and pass
:param:`option_locations` to support deleting optional files/folders.
]
if call[name[os].path.exists, parameter[name[dest]]] begin[:]
call[name[shutil].rmtree, parameter[name[dest]]]
call[name[shutil].copytree, parameter[name[src], name[dest]]]
if name[option_locations] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c990a90>, <ast.Name object at 0x7da20c991fc0>]]] in starred[name[option_locations]] begin[:]
for taget[name[path]] in starred[name[paths]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[dest], name[path]]]
if call[name[os].path.isfile, parameter[name[path]]] begin[:]
call[name[os].remove, parameter[name[path]]]
if compare[constant[app_bundle_module_name] in name[ctx]] begin[:]
call[name[shutil].move, parameter[call[name[os].path.join, parameter[name[dest], constant[app]]], call[name[os].path.join, parameter[name[dest], call[name[ctx]][constant[app_bundle_module_name]]]]]]
call[name[shutil].move, parameter[call[name[os].path.join, parameter[name[dest], constant[tests], constant[app]]], call[name[os].path.join, parameter[name[dest], constant[tests], call[name[ctx]][constant[app_bundle_module_name]]]]]]
call[name[_render_file_tree], parameter[name[dest], name[ctx]]] | keyword[def] identifier[copy_file_tree] ( identifier[src] : identifier[str] ,
identifier[dest] : identifier[str] ,
identifier[ctx] : identifier[Optional] [ identifier[Dict] [ identifier[str] , identifier[Any] ]]= keyword[None] ,
identifier[option_locations] : identifier[Optional] [ identifier[List] [ identifier[Tuple] [ identifier[str] , identifier[List] [ identifier[str] ]]]]= keyword[None] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[dest] ):
identifier[shutil] . identifier[rmtree] ( identifier[dest] , identifier[ignore_errors] = keyword[True] )
identifier[shutil] . identifier[copytree] ( identifier[src] , identifier[dest] )
keyword[if] identifier[option_locations] :
keyword[for] identifier[option] , identifier[paths] keyword[in] identifier[option_locations] :
keyword[for] identifier[path] keyword[in] identifier[paths] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dest] , identifier[path] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
identifier[os] . identifier[remove] ( identifier[path] )
keyword[else] :
identifier[shutil] . identifier[rmtree] ( identifier[path] , identifier[ignore_errors] = keyword[True] )
keyword[if] literal[string] keyword[in] identifier[ctx] :
identifier[shutil] . identifier[move] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dest] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[dest] , identifier[ctx] [ literal[string] ]))
identifier[shutil] . identifier[move] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dest] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[dest] , literal[string] , identifier[ctx] [ literal[string] ]))
identifier[_render_file_tree] ( identifier[dest] , identifier[ctx] ) | def copy_file_tree(src: str, dest: str, ctx: Optional[Dict[str, Any]]=None, option_locations: Optional[List[Tuple[str, List[str]]]]=None):
"""
Copy the file tree under the :param:`src` directory to the :param:`dest`
directory. Pass :param:`ctx` to support rendering the files, and pass
:param:`option_locations` to support deleting optional files/folders.
"""
if os.path.exists(dest):
shutil.rmtree(dest, ignore_errors=True) # depends on [control=['if'], data=[]]
shutil.copytree(src, dest)
if option_locations:
for (option, paths) in option_locations:
for path in paths:
path = os.path.join(dest, path)
if os.path.isfile(path):
os.remove(path) # depends on [control=['if'], data=[]]
else:
shutil.rmtree(path, ignore_errors=True) # depends on [control=['for'], data=['path']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if 'app_bundle_module_name' in ctx:
shutil.move(os.path.join(dest, 'app'), os.path.join(dest, ctx['app_bundle_module_name']))
shutil.move(os.path.join(dest, 'tests', 'app'), os.path.join(dest, 'tests', ctx['app_bundle_module_name'])) # depends on [control=['if'], data=['ctx']]
_render_file_tree(dest, ctx) |
def processFlat(self):
"""Main process.
Returns
-------
est_idxs : np.array(N)
Estimated times for the segment boundaries in frame indeces.
est_labels : np.array(N-1)
Estimated labels for the segments.
"""
# Structural Features params
Mp = self.config["Mp_adaptive"] # Size of the adaptive threshold for
# peak picking
od = self.config["offset_thres"] # Offset coefficient for adaptive
# thresholding
M = self.config["M_gaussian"] # Size of gaussian kernel in beats
m = self.config["m_embedded"] # Number of embedded dimensions
k = self.config["k_nearest"] # k*N-nearest neighbors for the
# recurrence plot
# Preprocess to obtain features, times, and input boundary indeces
F = self._preprocess()
# Normalize
F = U.normalize(F, norm_type=self.config["bound_norm_feats"])
# Check size in case the track is too short
if F.shape[0] > 20:
if self.framesync:
red = 0.1
F_copy = np.copy(F)
F = librosa.util.utils.sync(
F.T, np.linspace(0, F.shape[0], num=F.shape[0] * red),
pad=False).T
# Emedding the feature space (i.e. shingle)
E = embedded_space(F, m)
# plt.imshow(E.T, interpolation="nearest", aspect="auto"); plt.show()
# Recurrence matrix
R = librosa.segment.recurrence_matrix(
E.T,
k=k * int(F.shape[0]),
width=1, # zeros from the diagonal
metric="euclidean",
sym=True).astype(np.float32)
# Circular shift
L = circular_shift(R)
#plt.imshow(L, interpolation="nearest", cmap=plt.get_cmap("binary"))
#plt.show()
# Obtain structural features by filtering the lag matrix
SF = gaussian_filter(L.T, M=M, axis=1)
SF = gaussian_filter(L.T, M=1, axis=0)
# plt.imshow(SF.T, interpolation="nearest", aspect="auto")
#plt.show()
# Compute the novelty curve
nc = compute_nc(SF)
# Find peaks in the novelty curve
est_bounds = pick_peaks(nc, L=Mp, offset_denom=od)
# Re-align embedded space
est_bounds = np.asarray(est_bounds) + int(np.ceil(m / 2.))
if self.framesync:
est_bounds /= red
F = F_copy
else:
est_bounds = []
# Add first and last frames
est_idxs = np.concatenate(([0], est_bounds, [F.shape[0] - 1]))
est_idxs = np.unique(est_idxs)
assert est_idxs[0] == 0 and est_idxs[-1] == F.shape[0] - 1
# Empty labels
est_labels = np.ones(len(est_idxs) - 1) * - 1
# Post process estimations
est_idxs, est_labels = self._postprocess(est_idxs, est_labels)
# plt.figure(1)
# plt.plot(nc);
# [plt.axvline(p, color="m", ymin=.6) for p in est_bounds]
# [plt.axvline(b, color="b", ymax=.6, ymin=.3) for b in brian_bounds]
# [plt.axvline(b, color="g", ymax=.3) for b in ann_bounds]
# plt.show()
return est_idxs, est_labels | def function[processFlat, parameter[self]]:
constant[Main process.
Returns
-------
est_idxs : np.array(N)
Estimated times for the segment boundaries in frame indeces.
est_labels : np.array(N-1)
Estimated labels for the segments.
]
variable[Mp] assign[=] call[name[self].config][constant[Mp_adaptive]]
variable[od] assign[=] call[name[self].config][constant[offset_thres]]
variable[M] assign[=] call[name[self].config][constant[M_gaussian]]
variable[m] assign[=] call[name[self].config][constant[m_embedded]]
variable[k] assign[=] call[name[self].config][constant[k_nearest]]
variable[F] assign[=] call[name[self]._preprocess, parameter[]]
variable[F] assign[=] call[name[U].normalize, parameter[name[F]]]
if compare[call[name[F].shape][constant[0]] greater[>] constant[20]] begin[:]
if name[self].framesync begin[:]
variable[red] assign[=] constant[0.1]
variable[F_copy] assign[=] call[name[np].copy, parameter[name[F]]]
variable[F] assign[=] call[name[librosa].util.utils.sync, parameter[name[F].T, call[name[np].linspace, parameter[constant[0], call[name[F].shape][constant[0]]]]]].T
variable[E] assign[=] call[name[embedded_space], parameter[name[F], name[m]]]
variable[R] assign[=] call[call[name[librosa].segment.recurrence_matrix, parameter[name[E].T]].astype, parameter[name[np].float32]]
variable[L] assign[=] call[name[circular_shift], parameter[name[R]]]
variable[SF] assign[=] call[name[gaussian_filter], parameter[name[L].T]]
variable[SF] assign[=] call[name[gaussian_filter], parameter[name[L].T]]
variable[nc] assign[=] call[name[compute_nc], parameter[name[SF]]]
variable[est_bounds] assign[=] call[name[pick_peaks], parameter[name[nc]]]
variable[est_bounds] assign[=] binary_operation[call[name[np].asarray, parameter[name[est_bounds]]] + call[name[int], parameter[call[name[np].ceil, parameter[binary_operation[name[m] / constant[2.0]]]]]]]
if name[self].framesync begin[:]
<ast.AugAssign object at 0x7da1b03ba230>
variable[F] assign[=] name[F_copy]
variable[est_idxs] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.List object at 0x7da1b02f5990>, <ast.Name object at 0x7da1b02f5930>, <ast.List object at 0x7da1b02f5900>]]]]
variable[est_idxs] assign[=] call[name[np].unique, parameter[name[est_idxs]]]
assert[<ast.BoolOp object at 0x7da1b02f5660>]
variable[est_labels] assign[=] binary_operation[call[name[np].ones, parameter[binary_operation[call[name[len], parameter[name[est_idxs]]] - constant[1]]]] * <ast.UnaryOp object at 0x7da1b02f5120>]
<ast.Tuple object at 0x7da1b02f5090> assign[=] call[name[self]._postprocess, parameter[name[est_idxs], name[est_labels]]]
return[tuple[[<ast.Name object at 0x7da1b02f4eb0>, <ast.Name object at 0x7da1b02f4e80>]]] | keyword[def] identifier[processFlat] ( identifier[self] ):
literal[string]
identifier[Mp] = identifier[self] . identifier[config] [ literal[string] ]
identifier[od] = identifier[self] . identifier[config] [ literal[string] ]
identifier[M] = identifier[self] . identifier[config] [ literal[string] ]
identifier[m] = identifier[self] . identifier[config] [ literal[string] ]
identifier[k] = identifier[self] . identifier[config] [ literal[string] ]
identifier[F] = identifier[self] . identifier[_preprocess] ()
identifier[F] = identifier[U] . identifier[normalize] ( identifier[F] , identifier[norm_type] = identifier[self] . identifier[config] [ literal[string] ])
keyword[if] identifier[F] . identifier[shape] [ literal[int] ]> literal[int] :
keyword[if] identifier[self] . identifier[framesync] :
identifier[red] = literal[int]
identifier[F_copy] = identifier[np] . identifier[copy] ( identifier[F] )
identifier[F] = identifier[librosa] . identifier[util] . identifier[utils] . identifier[sync] (
identifier[F] . identifier[T] , identifier[np] . identifier[linspace] ( literal[int] , identifier[F] . identifier[shape] [ literal[int] ], identifier[num] = identifier[F] . identifier[shape] [ literal[int] ]* identifier[red] ),
identifier[pad] = keyword[False] ). identifier[T]
identifier[E] = identifier[embedded_space] ( identifier[F] , identifier[m] )
identifier[R] = identifier[librosa] . identifier[segment] . identifier[recurrence_matrix] (
identifier[E] . identifier[T] ,
identifier[k] = identifier[k] * identifier[int] ( identifier[F] . identifier[shape] [ literal[int] ]),
identifier[width] = literal[int] ,
identifier[metric] = literal[string] ,
identifier[sym] = keyword[True] ). identifier[astype] ( identifier[np] . identifier[float32] )
identifier[L] = identifier[circular_shift] ( identifier[R] )
identifier[SF] = identifier[gaussian_filter] ( identifier[L] . identifier[T] , identifier[M] = identifier[M] , identifier[axis] = literal[int] )
identifier[SF] = identifier[gaussian_filter] ( identifier[L] . identifier[T] , identifier[M] = literal[int] , identifier[axis] = literal[int] )
identifier[nc] = identifier[compute_nc] ( identifier[SF] )
identifier[est_bounds] = identifier[pick_peaks] ( identifier[nc] , identifier[L] = identifier[Mp] , identifier[offset_denom] = identifier[od] )
identifier[est_bounds] = identifier[np] . identifier[asarray] ( identifier[est_bounds] )+ identifier[int] ( identifier[np] . identifier[ceil] ( identifier[m] / literal[int] ))
keyword[if] identifier[self] . identifier[framesync] :
identifier[est_bounds] /= identifier[red]
identifier[F] = identifier[F_copy]
keyword[else] :
identifier[est_bounds] =[]
identifier[est_idxs] = identifier[np] . identifier[concatenate] (([ literal[int] ], identifier[est_bounds] ,[ identifier[F] . identifier[shape] [ literal[int] ]- literal[int] ]))
identifier[est_idxs] = identifier[np] . identifier[unique] ( identifier[est_idxs] )
keyword[assert] identifier[est_idxs] [ literal[int] ]== literal[int] keyword[and] identifier[est_idxs] [- literal[int] ]== identifier[F] . identifier[shape] [ literal[int] ]- literal[int]
identifier[est_labels] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[est_idxs] )- literal[int] )*- literal[int]
identifier[est_idxs] , identifier[est_labels] = identifier[self] . identifier[_postprocess] ( identifier[est_idxs] , identifier[est_labels] )
keyword[return] identifier[est_idxs] , identifier[est_labels] | def processFlat(self):
"""Main process.
Returns
-------
est_idxs : np.array(N)
Estimated times for the segment boundaries in frame indeces.
est_labels : np.array(N-1)
Estimated labels for the segments.
"""
# Structural Features params
Mp = self.config['Mp_adaptive'] # Size of the adaptive threshold for
# peak picking
od = self.config['offset_thres'] # Offset coefficient for adaptive
# thresholding
M = self.config['M_gaussian'] # Size of gaussian kernel in beats
m = self.config['m_embedded'] # Number of embedded dimensions
k = self.config['k_nearest'] # k*N-nearest neighbors for the
# recurrence plot
# Preprocess to obtain features, times, and input boundary indeces
F = self._preprocess()
# Normalize
F = U.normalize(F, norm_type=self.config['bound_norm_feats'])
# Check size in case the track is too short
if F.shape[0] > 20:
if self.framesync:
red = 0.1
F_copy = np.copy(F)
F = librosa.util.utils.sync(F.T, np.linspace(0, F.shape[0], num=F.shape[0] * red), pad=False).T # depends on [control=['if'], data=[]]
# Emedding the feature space (i.e. shingle)
E = embedded_space(F, m)
# plt.imshow(E.T, interpolation="nearest", aspect="auto"); plt.show()
# Recurrence matrix
# zeros from the diagonal
R = librosa.segment.recurrence_matrix(E.T, k=k * int(F.shape[0]), width=1, metric='euclidean', sym=True).astype(np.float32)
# Circular shift
L = circular_shift(R)
#plt.imshow(L, interpolation="nearest", cmap=plt.get_cmap("binary"))
#plt.show()
# Obtain structural features by filtering the lag matrix
SF = gaussian_filter(L.T, M=M, axis=1)
SF = gaussian_filter(L.T, M=1, axis=0)
# plt.imshow(SF.T, interpolation="nearest", aspect="auto")
#plt.show()
# Compute the novelty curve
nc = compute_nc(SF)
# Find peaks in the novelty curve
est_bounds = pick_peaks(nc, L=Mp, offset_denom=od)
# Re-align embedded space
est_bounds = np.asarray(est_bounds) + int(np.ceil(m / 2.0))
if self.framesync:
est_bounds /= red
F = F_copy # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
est_bounds = []
# Add first and last frames
est_idxs = np.concatenate(([0], est_bounds, [F.shape[0] - 1]))
est_idxs = np.unique(est_idxs)
assert est_idxs[0] == 0 and est_idxs[-1] == F.shape[0] - 1
# Empty labels
est_labels = np.ones(len(est_idxs) - 1) * -1
# Post process estimations
(est_idxs, est_labels) = self._postprocess(est_idxs, est_labels)
# plt.figure(1)
# plt.plot(nc);
# [plt.axvline(p, color="m", ymin=.6) for p in est_bounds]
# [plt.axvline(b, color="b", ymax=.6, ymin=.3) for b in brian_bounds]
# [plt.axvline(b, color="g", ymax=.3) for b in ann_bounds]
# plt.show()
return (est_idxs, est_labels) |
def _get_representative(self, obj):
"""Finds and returns the root of the set containing `obj`."""
if obj not in self._parents:
self._parents[obj] = obj
self._weights[obj] = 1
self._prev_next[obj] = [obj, obj]
self._min_values[obj] = obj
return obj
path = [obj]
root = self._parents[obj]
while root != path[-1]:
path.append(root)
root = self._parents[root]
# compress the path and return
for ancestor in path:
self._parents[ancestor] = root
return root | def function[_get_representative, parameter[self, obj]]:
constant[Finds and returns the root of the set containing `obj`.]
if compare[name[obj] <ast.NotIn object at 0x7da2590d7190> name[self]._parents] begin[:]
call[name[self]._parents][name[obj]] assign[=] name[obj]
call[name[self]._weights][name[obj]] assign[=] constant[1]
call[name[self]._prev_next][name[obj]] assign[=] list[[<ast.Name object at 0x7da18ede4d30>, <ast.Name object at 0x7da18ede6d40>]]
call[name[self]._min_values][name[obj]] assign[=] name[obj]
return[name[obj]]
variable[path] assign[=] list[[<ast.Name object at 0x7da18ede5960>]]
variable[root] assign[=] call[name[self]._parents][name[obj]]
while compare[name[root] not_equal[!=] call[name[path]][<ast.UnaryOp object at 0x7da18bccb9a0>]] begin[:]
call[name[path].append, parameter[name[root]]]
variable[root] assign[=] call[name[self]._parents][name[root]]
for taget[name[ancestor]] in starred[name[path]] begin[:]
call[name[self]._parents][name[ancestor]] assign[=] name[root]
return[name[root]] | keyword[def] identifier[_get_representative] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] identifier[obj] keyword[not] keyword[in] identifier[self] . identifier[_parents] :
identifier[self] . identifier[_parents] [ identifier[obj] ]= identifier[obj]
identifier[self] . identifier[_weights] [ identifier[obj] ]= literal[int]
identifier[self] . identifier[_prev_next] [ identifier[obj] ]=[ identifier[obj] , identifier[obj] ]
identifier[self] . identifier[_min_values] [ identifier[obj] ]= identifier[obj]
keyword[return] identifier[obj]
identifier[path] =[ identifier[obj] ]
identifier[root] = identifier[self] . identifier[_parents] [ identifier[obj] ]
keyword[while] identifier[root] != identifier[path] [- literal[int] ]:
identifier[path] . identifier[append] ( identifier[root] )
identifier[root] = identifier[self] . identifier[_parents] [ identifier[root] ]
keyword[for] identifier[ancestor] keyword[in] identifier[path] :
identifier[self] . identifier[_parents] [ identifier[ancestor] ]= identifier[root]
keyword[return] identifier[root] | def _get_representative(self, obj):
"""Finds and returns the root of the set containing `obj`."""
if obj not in self._parents:
self._parents[obj] = obj
self._weights[obj] = 1
self._prev_next[obj] = [obj, obj]
self._min_values[obj] = obj
return obj # depends on [control=['if'], data=['obj']]
path = [obj]
root = self._parents[obj]
while root != path[-1]:
path.append(root)
root = self._parents[root] # depends on [control=['while'], data=['root']]
# compress the path and return
for ancestor in path:
self._parents[ancestor] = root # depends on [control=['for'], data=['ancestor']]
return root |
def generate(self, data, *args, **kwargs):
    """Build the final push artifact from *data* and return it as bytes.

    A temporary directory is used as the default build location unless the
    caller already supplied ``package_build_dir`` in the options.  The
    crawler is run against *data* and the resulting MOBI package is read
    back from disk and returned, ready for persistence and later pushes.

    :param dict data: structure describing the content to package
    :return: raw bytes of the generated book package
    :rtype: bytes
    """
    with tempfile.TemporaryDirectory() as build_root:
        # Respect a caller-provided build dir; fall back to the temp dir.
        self.options.setdefault('package_build_dir', build_root)
        CrawlerScript(self.options).crawl(data, self.spider, *args, **kwargs)
        package_path = os.path.join(
            self.options['package_build_dir'], 'source', 'moear.mobi')
        with open(package_path, 'rb') as mobi_file:
            return mobi_file.read()
return content | def function[generate, parameter[self, data]]:
constant[
根据传入的数据结构生成最终用于推送的文件字节字符串( :func:`bytes` ),
MoEar会将其持久化并用于之后的推送任务
:param dict data: 待打包的数据结构
:return: 返回生成的书籍打包输出字节
:rtype: bytes
]
with call[name[tempfile].TemporaryDirectory, parameter[]] begin[:]
call[name[self].options.setdefault, parameter[constant[package_build_dir], name[tmpdirname]]]
variable[crawler] assign[=] call[name[CrawlerScript], parameter[name[self].options]]
call[name[crawler].crawl, parameter[name[data], name[self].spider, <ast.Starred object at 0x7da18eb57910>]]
variable[output_file] assign[=] call[name[os].path.join, parameter[call[name[self].options][constant[package_build_dir]], constant[source], constant[moear.mobi]]]
with call[name[open], parameter[name[output_file], constant[rb]]] begin[:]
variable[content] assign[=] call[name[fh].read, parameter[]]
return[name[content]] | keyword[def] identifier[generate] ( identifier[self] , identifier[data] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[tempfile] . identifier[TemporaryDirectory] () keyword[as] identifier[tmpdirname] :
identifier[self] . identifier[options] . identifier[setdefault] ( literal[string] , identifier[tmpdirname] )
identifier[crawler] = identifier[CrawlerScript] ( identifier[self] . identifier[options] )
identifier[crawler] . identifier[crawl] ( identifier[data] , identifier[self] . identifier[spider] ,* identifier[args] ,** identifier[kwargs] )
identifier[output_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[options] [ literal[string] ], literal[string] , literal[string] )
keyword[with] identifier[open] ( identifier[output_file] , literal[string] ) keyword[as] identifier[fh] :
identifier[content] = identifier[fh] . identifier[read] ()
keyword[return] identifier[content] | def generate(self, data, *args, **kwargs):
"""
根据传入的数据结构生成最终用于推送的文件字节字符串( :func:`bytes` ),
MoEar会将其持久化并用于之后的推送任务
:param dict data: 待打包的数据结构
:return: 返回生成的书籍打包输出字节
:rtype: bytes
"""
with tempfile.TemporaryDirectory() as tmpdirname:
self.options.setdefault('package_build_dir', tmpdirname)
crawler = CrawlerScript(self.options)
crawler.crawl(data, self.spider, *args, **kwargs)
output_file = os.path.join(self.options['package_build_dir'], 'source', 'moear.mobi')
with open(output_file, 'rb') as fh:
content = fh.read() # depends on [control=['with'], data=['fh']] # depends on [control=['with'], data=['tmpdirname']]
return content |
def command(self, cmd_code, cmd_data, timeout):
    """Send a host command and return the chip response payload.

    A pseudo-APDU frame is built around *cmd_data*, exchanged via
    ``ccid_xfr_block``, and the response is validated (length, response
    code, and trailing status words) before the payload — stripped of its
    two-byte header and two-byte status — is returned.

    Raises ``IOError(errno.EIO)`` on a short, malformed, or error-status
    response.
    """
    log.log(logging.DEBUG - 1, self.CMD[cmd_code] + " " + hexlify(cmd_data))
    payload = bytearray([0xD4, cmd_code]) + bytearray(cmd_data)
    apdu = bytearray([0xFF, 0x00, 0x00, 0x00, len(payload)]) + payload
    rsp = self.ccid_xfr_block(apdu, timeout)
    if not rsp or len(rsp) < 4:
        log.error("insufficient data for decoding chip response")
        raise IOError(errno.EIO, os.strerror(errno.EIO))
    # Response code must be command code + 1, prefixed with 0xD5.
    if rsp[0] != 0xD5 or rsp[1] != cmd_code + 1:
        log.error("received invalid chip response")
        raise IOError(errno.EIO, os.strerror(errno.EIO))
    # The frame must end with the success status words 0x90 0x00.
    if rsp[-2] != 0x90 or rsp[-1] != 0x00:
        log.error("received pseudo apdu with error status")
        raise IOError(errno.EIO, os.strerror(errno.EIO))
    return rsp[2:-2]
constant[Send a host command and return the chip response.
]
call[name[log].log, parameter[binary_operation[name[logging].DEBUG - constant[1]], binary_operation[binary_operation[call[name[self].CMD][name[cmd_code]] + constant[ ]] + call[name[hexlify], parameter[name[cmd_data]]]]]]
variable[frame] assign[=] binary_operation[call[name[bytearray], parameter[list[[<ast.Constant object at 0x7da18dc05e40>, <ast.Name object at 0x7da18dc05780>]]]] + call[name[bytearray], parameter[name[cmd_data]]]]
variable[frame] assign[=] binary_operation[call[name[bytearray], parameter[list[[<ast.Constant object at 0x7da18dc07280>, <ast.Constant object at 0x7da18dc058d0>, <ast.Constant object at 0x7da18dc04fa0>, <ast.Constant object at 0x7da18dc069e0>, <ast.Call object at 0x7da18dc07fd0>]]]] + name[frame]]
variable[frame] assign[=] call[name[self].ccid_xfr_block, parameter[name[frame], name[timeout]]]
if <ast.BoolOp object at 0x7da18dc04670> begin[:]
call[name[log].error, parameter[constant[insufficient data for decoding chip response]]]
<ast.Raise object at 0x7da18fe939a0>
if <ast.UnaryOp object at 0x7da18fe92bf0> begin[:]
call[name[log].error, parameter[constant[received invalid chip response]]]
<ast.Raise object at 0x7da20c6c6f80>
if <ast.UnaryOp object at 0x7da20c6c67d0> begin[:]
call[name[log].error, parameter[constant[received pseudo apdu with error status]]]
<ast.Raise object at 0x7da2054a7580>
return[call[name[frame]][<ast.Slice object at 0x7da2054a7220>]] | keyword[def] identifier[command] ( identifier[self] , identifier[cmd_code] , identifier[cmd_data] , identifier[timeout] ):
literal[string]
identifier[log] . identifier[log] ( identifier[logging] . identifier[DEBUG] - literal[int] , identifier[self] . identifier[CMD] [ identifier[cmd_code] ]+ literal[string] + identifier[hexlify] ( identifier[cmd_data] ))
identifier[frame] = identifier[bytearray] ([ literal[int] , identifier[cmd_code] ])+ identifier[bytearray] ( identifier[cmd_data] )
identifier[frame] = identifier[bytearray] ([ literal[int] , literal[int] , literal[int] , literal[int] , identifier[len] ( identifier[frame] )])+ identifier[frame]
identifier[frame] = identifier[self] . identifier[ccid_xfr_block] ( identifier[frame] , identifier[timeout] )
keyword[if] keyword[not] identifier[frame] keyword[or] identifier[len] ( identifier[frame] )< literal[int] :
identifier[log] . identifier[error] ( literal[string] )
keyword[raise] identifier[IOError] ( identifier[errno] . identifier[EIO] , identifier[os] . identifier[strerror] ( identifier[errno] . identifier[EIO] ))
keyword[if] keyword[not] ( identifier[frame] [ literal[int] ]== literal[int] keyword[and] identifier[frame] [ literal[int] ]== identifier[cmd_code] + literal[int] ):
identifier[log] . identifier[error] ( literal[string] )
keyword[raise] identifier[IOError] ( identifier[errno] . identifier[EIO] , identifier[os] . identifier[strerror] ( identifier[errno] . identifier[EIO] ))
keyword[if] keyword[not] ( identifier[frame] [- literal[int] ]== literal[int] keyword[and] identifier[frame] [- literal[int] ]== literal[int] ):
identifier[log] . identifier[error] ( literal[string] )
keyword[raise] identifier[IOError] ( identifier[errno] . identifier[EIO] , identifier[os] . identifier[strerror] ( identifier[errno] . identifier[EIO] ))
keyword[return] identifier[frame] [ literal[int] :- literal[int] ] | def command(self, cmd_code, cmd_data, timeout):
"""Send a host command and return the chip response.
"""
log.log(logging.DEBUG - 1, self.CMD[cmd_code] + ' ' + hexlify(cmd_data))
frame = bytearray([212, cmd_code]) + bytearray(cmd_data)
frame = bytearray([255, 0, 0, 0, len(frame)]) + frame
frame = self.ccid_xfr_block(frame, timeout)
if not frame or len(frame) < 4:
log.error('insufficient data for decoding chip response')
raise IOError(errno.EIO, os.strerror(errno.EIO)) # depends on [control=['if'], data=[]]
if not (frame[0] == 213 and frame[1] == cmd_code + 1):
log.error('received invalid chip response')
raise IOError(errno.EIO, os.strerror(errno.EIO)) # depends on [control=['if'], data=[]]
if not (frame[-2] == 144 and frame[-1] == 0):
log.error('received pseudo apdu with error status')
raise IOError(errno.EIO, os.strerror(errno.EIO)) # depends on [control=['if'], data=[]]
return frame[2:-2] |
def fmthours(radians, norm='wrap', precision=3, seps='::'):
    """Format an angle as sexagesimal hours in a string.

    Arguments are:

    radians
      The angle, in radians.
    norm (default "wrap")
      Normalization mode for angles outside the standard 0 to 2π range.
      "none" formats the value as-is, "wrap" folds it into the standard
      range, and "raise" raises :exc:`ValueError`.
    precision (default 3)
      Number of decimal places in the "seconds" component.
    seps (default "::")
      Two- or three-item iterable separating the hours, minutes, and
      seconds components; a third element, if given, trails the seconds.
      "hms" yields e.g. "12h34m56s"; ``['', '']`` yields e.g. "123456".

    Returns a string.
    """
    hours = radians * R2H
    return _fmtsexagesimal(hours, norm, 24, seps, precision=precision)
constant[Format an angle as sexagesimal hours in a string.
Arguments are:
radians
The angle, in radians.
norm (default "wrap")
The normalization mode, used for angles outside of the standard range
of 0 to 2π. If "none", the value is formatted ignoring any potential
problems. If "wrap", it is wrapped to lie within the standard range.
If "raise", a :exc:`ValueError` is raised.
precision (default 3)
The number of decimal places in the "seconds" place to use in the
formatted string.
seps (default "::")
A two- or three-item iterable, used to separate the hours, minutes, and
seconds components. If a third element is present, it appears after the
seconds component. Specifying "hms" yields something like "12h34m56s";
specifying ``['', '']`` yields something like "123456".
Returns a string.
]
return[call[name[_fmtsexagesimal], parameter[binary_operation[name[radians] * name[R2H]], name[norm], constant[24], name[seps]]]] | keyword[def] identifier[fmthours] ( identifier[radians] , identifier[norm] = literal[string] , identifier[precision] = literal[int] , identifier[seps] = literal[string] ):
literal[string]
keyword[return] identifier[_fmtsexagesimal] ( identifier[radians] * identifier[R2H] , identifier[norm] , literal[int] , identifier[seps] , identifier[precision] = identifier[precision] ) | def fmthours(radians, norm='wrap', precision=3, seps='::'):
"""Format an angle as sexagesimal hours in a string.
Arguments are:
radians
The angle, in radians.
norm (default "wrap")
The normalization mode, used for angles outside of the standard range
of 0 to 2π. If "none", the value is formatted ignoring any potential
problems. If "wrap", it is wrapped to lie within the standard range.
If "raise", a :exc:`ValueError` is raised.
precision (default 3)
The number of decimal places in the "seconds" place to use in the
formatted string.
seps (default "::")
A two- or three-item iterable, used to separate the hours, minutes, and
seconds components. If a third element is present, it appears after the
seconds component. Specifying "hms" yields something like "12h34m56s";
specifying ``['', '']`` yields something like "123456".
Returns a string.
"""
return _fmtsexagesimal(radians * R2H, norm, 24, seps, precision=precision) |
def _generate_transcript(self, history: List[Union[HistoryItem, str]], transcript_file: str) -> None:
    """Generate a transcript file from a given history of commands.

    Each history item is echoed with the appropriate prompt(s), replayed
    with its stdout captured in a buffer, and the command/output pairs are
    concatenated into a transcript.  Forward slashes in the output are
    escaped because transcript replay treats ``/.../`` as regular
    expressions.

    :param history: commands to replay
    :param transcript_file: destination path (``~`` is expanded)
    """
    import io
    # Validate the transcript file path to make sure directory exists and
    # write access is available
    transcript_path = os.path.abspath(os.path.expanduser(transcript_file))
    transcript_dir = os.path.dirname(transcript_path)
    if not os.path.isdir(transcript_dir) or not os.access(transcript_dir, os.W_OK):
        self.perror("{!r} is not a directory or you don't have write access".format(transcript_dir),
                    traceback_war=False)
        return
    # Save and alter state *before* the try block so the finally clause can
    # always restore it; previously the assignments lived inside the try, so
    # an early failure would have raised NameError while restoring.
    with self.sigint_protection:
        # Disable echo while we manually redirect stdout to a StringIO buffer
        saved_echo = self.echo
        saved_stdout = self.stdout
        self.echo = False
    try:
        # The problem with supporting regular expressions in transcripts
        # is that they shouldn't be processed in the command, just the output.
        # In addition, when we generate a transcript, any slashes in the output
        # are not really intended to indicate regular expressions, so they should
        # be escaped.
        #
        # We have to jump through some hoops here in order to catch the commands
        # separately from the output and escape the slashes in the output.
        transcript = ''
        for history_item in history:
            # build the command, complete with prompts. When we replay
            # the transcript, we look for the prompts to separate
            # the command from the output
            first = True
            command = ''
            for line in history_item.splitlines():
                if first:
                    command += '{}{}\n'.format(self.prompt, line)
                    first = False
                else:
                    command += '{}{}\n'.format(self.continuation_prompt, line)
            transcript += command

            # create a new string buffer and set it to stdout to catch the output
            # of the command
            membuf = io.StringIO()
            self.stdout = membuf

            # then run the command and let the output go into our buffer
            self.onecmd_plus_hooks(history_item)

            # rewind the buffer to the beginning
            membuf.seek(0)

            # get the output out of the buffer
            output = membuf.read()

            # and add the regex-escaped output to the transcript
            transcript += output.replace('/', r'\/')
    finally:
        with self.sigint_protection:
            # Restore altered attributes to their original state
            self.echo = saved_echo
            self.stdout = saved_stdout

    # finally, we can write the transcript out to the file
    try:
        # BUG FIX: open the validated, user-expanded path rather than the raw
        # argument, so a '~'-prefixed transcript_file is written where the
        # directory check above actually looked.
        with open(transcript_path, 'w') as fout:
            fout.write(transcript)
    except OSError as ex:
        self.perror('Failed to save transcript: {}'.format(ex), traceback_war=False)
    else:
        # and let the user know what we did
        if len(history) > 1:
            plural = 'commands and their outputs'
        else:
            plural = 'command and its output'
        msg = '{} {} saved to transcript file {!r}'
        self.pfeedback(msg.format(len(history), plural, transcript_file))
constant[Generate a transcript file from a given history of commands.]
import module[io]
variable[transcript_path] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.expanduser, parameter[name[transcript_file]]]]]
variable[transcript_dir] assign[=] call[name[os].path.dirname, parameter[name[transcript_path]]]
if <ast.BoolOp object at 0x7da18eb571f0> begin[:]
call[name[self].perror, parameter[call[constant[{!r} is not a directory or you don't have write access].format, parameter[name[transcript_dir]]]]]
return[None]
<ast.Try object at 0x7da18eb56800>
<ast.Try object at 0x7da204567070> | keyword[def] identifier[_generate_transcript] ( identifier[self] , identifier[history] : identifier[List] [ identifier[Union] [ identifier[HistoryItem] , identifier[str] ]], identifier[transcript_file] : identifier[str] )-> keyword[None] :
literal[string]
keyword[import] identifier[io]
identifier[transcript_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[transcript_file] ))
identifier[transcript_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[transcript_path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[transcript_dir] ) keyword[or] keyword[not] identifier[os] . identifier[access] ( identifier[transcript_dir] , identifier[os] . identifier[W_OK] ):
identifier[self] . identifier[perror] ( literal[string] . identifier[format] ( identifier[transcript_dir] ),
identifier[traceback_war] = keyword[False] )
keyword[return]
keyword[try] :
keyword[with] identifier[self] . identifier[sigint_protection] :
identifier[saved_echo] = identifier[self] . identifier[echo]
identifier[saved_stdout] = identifier[self] . identifier[stdout]
identifier[self] . identifier[echo] = keyword[False]
identifier[transcript] = literal[string]
keyword[for] identifier[history_item] keyword[in] identifier[history] :
identifier[first] = keyword[True]
identifier[command] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[history_item] . identifier[splitlines] ():
keyword[if] identifier[first] :
identifier[command] += literal[string] . identifier[format] ( identifier[self] . identifier[prompt] , identifier[line] )
identifier[first] = keyword[False]
keyword[else] :
identifier[command] += literal[string] . identifier[format] ( identifier[self] . identifier[continuation_prompt] , identifier[line] )
identifier[transcript] += identifier[command]
identifier[membuf] = identifier[io] . identifier[StringIO] ()
identifier[self] . identifier[stdout] = identifier[membuf]
identifier[self] . identifier[onecmd_plus_hooks] ( identifier[history_item] )
identifier[membuf] . identifier[seek] ( literal[int] )
identifier[output] = identifier[membuf] . identifier[read] ()
identifier[transcript] += identifier[output] . identifier[replace] ( literal[string] , literal[string] )
keyword[finally] :
keyword[with] identifier[self] . identifier[sigint_protection] :
identifier[self] . identifier[echo] = identifier[saved_echo]
identifier[self] . identifier[stdout] = identifier[saved_stdout]
keyword[try] :
keyword[with] identifier[open] ( identifier[transcript_file] , literal[string] ) keyword[as] identifier[fout] :
identifier[fout] . identifier[write] ( identifier[transcript] )
keyword[except] identifier[OSError] keyword[as] identifier[ex] :
identifier[self] . identifier[perror] ( literal[string] . identifier[format] ( identifier[ex] ), identifier[traceback_war] = keyword[False] )
keyword[else] :
keyword[if] identifier[len] ( identifier[history] )> literal[int] :
identifier[plural] = literal[string]
keyword[else] :
identifier[plural] = literal[string]
identifier[msg] = literal[string]
identifier[self] . identifier[pfeedback] ( identifier[msg] . identifier[format] ( identifier[len] ( identifier[history] ), identifier[plural] , identifier[transcript_file] )) | def _generate_transcript(self, history: List[Union[HistoryItem, str]], transcript_file: str) -> None:
"""Generate a transcript file from a given history of commands."""
import io
# Validate the transcript file path to make sure directory exists and write access is available
transcript_path = os.path.abspath(os.path.expanduser(transcript_file))
transcript_dir = os.path.dirname(transcript_path)
if not os.path.isdir(transcript_dir) or not os.access(transcript_dir, os.W_OK):
self.perror("{!r} is not a directory or you don't have write access".format(transcript_dir), traceback_war=False)
return # depends on [control=['if'], data=[]]
try:
with self.sigint_protection:
# Disable echo while we manually redirect stdout to a StringIO buffer
saved_echo = self.echo
saved_stdout = self.stdout
self.echo = False # depends on [control=['with'], data=[]]
# The problem with supporting regular expressions in transcripts
# is that they shouldn't be processed in the command, just the output.
# In addition, when we generate a transcript, any slashes in the output
# are not really intended to indicate regular expressions, so they should
# be escaped.
#
# We have to jump through some hoops here in order to catch the commands
# separately from the output and escape the slashes in the output.
transcript = ''
for history_item in history:
# build the command, complete with prompts. When we replay
# the transcript, we look for the prompts to separate
# the command from the output
first = True
command = ''
for line in history_item.splitlines():
if first:
command += '{}{}\n'.format(self.prompt, line)
first = False # depends on [control=['if'], data=[]]
else:
command += '{}{}\n'.format(self.continuation_prompt, line) # depends on [control=['for'], data=['line']]
transcript += command
# create a new string buffer and set it to stdout to catch the output
# of the command
membuf = io.StringIO()
self.stdout = membuf
# then run the command and let the output go into our buffer
self.onecmd_plus_hooks(history_item)
# rewind the buffer to the beginning
membuf.seek(0)
# get the output out of the buffer
output = membuf.read()
# and add the regex-escaped output to the transcript
transcript += output.replace('/', '\\/') # depends on [control=['for'], data=['history_item']] # depends on [control=['try'], data=[]]
finally:
with self.sigint_protection:
# Restore altered attributes to their original state
self.echo = saved_echo
self.stdout = saved_stdout # depends on [control=['with'], data=[]]
# finally, we can write the transcript out to the file
try:
with open(transcript_file, 'w') as fout:
fout.write(transcript) # depends on [control=['with'], data=['fout']] # depends on [control=['try'], data=[]]
except OSError as ex:
self.perror('Failed to save transcript: {}'.format(ex), traceback_war=False) # depends on [control=['except'], data=['ex']]
else:
# and let the user know what we did
if len(history) > 1:
plural = 'commands and their outputs' # depends on [control=['if'], data=[]]
else:
plural = 'command and its output'
msg = '{} {} saved to transcript file {!r}'
self.pfeedback(msg.format(len(history), plural, transcript_file)) |
def _add_base_info(self, event_dict):
"""
Instead of using a processor, adding basic information like caller, filename etc
here.
"""
f = sys._getframe()
level_method_frame = f.f_back
caller_frame = level_method_frame.f_back
return event_dict | def function[_add_base_info, parameter[self, event_dict]]:
constant[
Instead of using a processor, adding basic information like caller, filename etc
here.
]
variable[f] assign[=] call[name[sys]._getframe, parameter[]]
variable[level_method_frame] assign[=] name[f].f_back
variable[caller_frame] assign[=] name[level_method_frame].f_back
return[name[event_dict]] | keyword[def] identifier[_add_base_info] ( identifier[self] , identifier[event_dict] ):
literal[string]
identifier[f] = identifier[sys] . identifier[_getframe] ()
identifier[level_method_frame] = identifier[f] . identifier[f_back]
identifier[caller_frame] = identifier[level_method_frame] . identifier[f_back]
keyword[return] identifier[event_dict] | def _add_base_info(self, event_dict):
"""
Instead of using a processor, adding basic information like caller, filename etc
here.
"""
f = sys._getframe()
level_method_frame = f.f_back
caller_frame = level_method_frame.f_back
return event_dict |
def parse_command_line():
    """Parse CLI args for the versioner tool.

    Returns the parsed :class:`argparse.Namespace`.  If no arguments are
    supplied, the help message is printed and the process exits with
    status 1.
    """
    ## create the parser
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
 * Example command-line usage: 
    ## push test branch to conda --label=conda-test for travis CI
    ./versioner.py -p toytree -b test -t 0.1.7 
    ## push master as a new tag to git and conda
    ./versioner.py -p toytree -b master -t 0.1.7 --deploy
    ## build other deps on conda at --label=conda-test
    ./versioner.py -p toyplot --no-git
    ./versioner.py -p pypng --no-git
    """)

    ## add arguments
    parser.add_argument('-v', '--version', action='version',
        version="0.1")
    parser.add_argument('-p',  # "--package"
        dest="package",
        default="toytree",
        type=str,
        # BUG FIX: help text was copy-pasted from the -t option; it now
        # describes the package argument itself.
        help="the name of the package to build and upload to conda")
    parser.add_argument('-b',  # "--branch"
        dest="branch",
        default="master",
        type=str,
        help="the branch to build conda package from")
    parser.add_argument('-t',  # "--tag"
        dest="tag",
        default="test",
        type=str,
        help="the tag to put in __init__ and use on conda")
    parser.add_argument("--deploy",
        dest="deploy",
        action='store_true',
        help="push the tag to git and upload to conda main label")
    parser.add_argument("--no-git",
        dest="nogit",
        action='store_true',
        help="skip git update and only build/upload to conda")

    ## if no args then return help message
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    ## parse args
    args = parser.parse_args()
    return args
constant[ Parse CLI args.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[-v], constant[--version]]]
call[name[parser].add_argument, parameter[constant[-p]]]
call[name[parser].add_argument, parameter[constant[-b]]]
call[name[parser].add_argument, parameter[constant[-t]]]
call[name[parser].add_argument, parameter[constant[--deploy]]]
call[name[parser].add_argument, parameter[constant[--no-git]]]
if compare[call[name[len], parameter[name[sys].argv]] equal[==] constant[1]] begin[:]
call[name[parser].print_help, parameter[]]
call[name[sys].exit, parameter[constant[1]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
return[name[args]] | keyword[def] identifier[parse_command_line] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[formatter_class] = identifier[argparse] . identifier[RawDescriptionHelpFormatter] ,
identifier[epilog] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[version] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[dest] = literal[string] ,
identifier[default] = literal[string] ,
identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[dest] = literal[string] ,
identifier[default] = literal[string] ,
identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[dest] = literal[string] ,
identifier[default] = literal[string] ,
identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[dest] = literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[dest] = literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string] )
keyword[if] identifier[len] ( identifier[sys] . identifier[argv] )== literal[int] :
identifier[parser] . identifier[print_help] ()
identifier[sys] . identifier[exit] ( literal[int] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
keyword[return] identifier[args] | def parse_command_line():
""" Parse CLI args."""
## create the parser
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog='\n * Example command-line usage: \n\n ## push test branch to conda --label=conda-test for travis CI\n ./versioner.py -p toytree -b test -t 0.1.7 \n\n ## push master as a new tag to git and conda\n ./versioner.py -p toytree -b master -t 0.1.7 --deploy\n\n ## build other deps on conda at --label=conda-test\n ./versioner.py -p toyplot --no-git\n ./versioner.py -p pypng --no-git\n\n ') ## add arguments
parser.add_argument('-v', '--version', action='version', version='0.1') #"--package",
parser.add_argument('-p', dest='package', default='toytree', type=str, help='the tag to put in __init__ and use on conda') #"--branch",
parser.add_argument('-b', dest='branch', default='master', type=str, help='the branch to build conda package from') #"--tag",
parser.add_argument('-t', dest='tag', default='test', type=str, help='the tag to put in __init__ and use on conda')
parser.add_argument('--deploy', dest='deploy', action='store_true', help='push the tag to git and upload to conda main label')
parser.add_argument('--no-git', dest='nogit', action='store_true', help='skip git update and only build/upload to conda')
## if no args then return help message
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1) # depends on [control=['if'], data=[]]
## parse args
args = parser.parse_args()
return args |
def main():
    """Main entry point for starting Spyder.

    Handles the pytest fast-path, command-line options, OpenGL selection,
    logging setup, the reset/optimize maintenance modes, the previous-crash
    dialog, and finally creates the main window and enters the Qt event
    loop via the monkey-patched ``sys.exit``.
    """
    # **** For Pytest ****
    # We need to create MainWindow **here** to avoid passing pytest
    # options to Spyder
    if running_under_pytest():
        try:
            from unittest.mock import Mock
        except ImportError:
            from mock import Mock  # Python 2
        # Build a minimal stand-in for the argparse options object with
        # every attribute that run_spyder() reads.
        options = Mock()
        options.working_directory = None
        options.profile = False
        options.multithreaded = False
        options.new_instance = False
        options.project = None
        options.window_title = None
        options.opengl_implementation = None
        options.debug_info = None
        options.debug_output = None
        if CONF.get('main', 'opengl') != 'automatic':
            option = CONF.get('main', 'opengl')
            set_opengl_implementation(option)
        app = initialize()
        window = run_spyder(app, options, None)
        return window
    # **** Collect command line options ****
    # Note regarding Options:
    # It's important to collect options before monkey patching sys.exit,
    # otherwise, argparse won't be able to exit if --help option is passed
    options, args = get_options()
    # **** Set OpenGL implementation to use ****
    # A CLI flag wins over the stored preference; "automatic" means leave
    # the choice to Qt.
    if options.opengl_implementation:
        option = options.opengl_implementation
        set_opengl_implementation(option)
    else:
        if CONF.get('main', 'opengl') != 'automatic':
            option = CONF.get('main', 'opengl')
            set_opengl_implementation(option)
    # **** Handle hide_console option ****
    if options.show_console:
        print("(Deprecated) --show console does nothing, now the default "
              " behavior is to show the console, use --hide-console if you "
              "want to hide it")
    # set_attached_console_visible is only available on Windows; the console
    # is forced visible for maintenance modes and debug runs.
    if set_attached_console_visible is not None:
        set_attached_console_visible(not options.hide_console
                                     or options.reset_config_files
                                     or options.reset_to_defaults
                                     or options.optimize
                                     or bool(get_debug_level()))
    # **** Set debugging info ****
    setup_logging(options)
    # **** Create the application ****
    app = initialize()
    # **** Handle other options ****
    # These maintenance modes run and return without creating a window.
    if options.reset_config_files:
        # <!> Remove all configuration files!
        reset_config_files()
        return
    elif options.reset_to_defaults:
        # Reset Spyder settings to defaults
        CONF.reset_to_defaults(save=True)
        return
    elif options.optimize:
        # Optimize the whole Spyder's source code directory
        import spyder
        programs.run_python_script(module="compileall",
                                   args=[spyder.__path__[0]], p_args=['-O'])
        return
    # **** Show crash dialog ****
    # The 'crash' flag is set below when window creation fails and cleared
    # here so the dialog only appears once per crash.
    if CONF.get('main', 'crash', False) and not DEV:
        CONF.set('main', 'crash', False)
        if SPLASH is not None:
            SPLASH.hide()
        QMessageBox.information(
            None, "Spyder",
            "Spyder crashed during last session.<br><br>"
            "If Spyder does not start at all and <u>before submitting a "
            "bug report</u>, please try to reset settings to defaults by "
            "running Spyder with the command line option '--reset':<br>"
            "<span style=\'color: #555555\'><b>spyder --reset</b></span>"
            "<br><br>"
            "<span style=\'color: #ff5555\'><b>Warning:</b></span> "
            "this command will remove all your Spyder configuration files "
            "located in '%s').<br><br>"
            "If Spyder still fails to launch, you should consult our "
            "comprehensive <b><a href=\"%s\">Troubleshooting Guide</a></b>, "
            "which when followed carefully solves the vast majority of "
            "crashes; also, take "
            "the time to search for <a href=\"%s\">known bugs</a> or "
            "<a href=\"%s\">discussions</a> matching your situation before "
            "submitting a report to our <a href=\"%s\">issue tracker</a>. "
            "Your feedback will always be greatly appreciated."
            "" % (get_conf_path(), __trouble_url__, __project_url__,
                  __forum_url__, __project_url__))
    # **** Create main window ****
    mainwindow = None
    try:
        mainwindow = run_spyder(app, options, args)
    except FontError as fontError:
        # Icon theme failed to load; fall back to the legacy theme and ask
        # the user to restart.
        QMessageBox.information(None, "Spyder",
                "Spyder was unable to load the <i>Spyder 3</i> "
                "icon theme. That's why it's going to fallback to the "
                "theme used in Spyder 2.<br><br>"
                "For that, please close this window and start Spyder again.")
        CONF.set('appearance', 'icon_theme', 'spyder 2')
    except BaseException:
        # Record the crash (triggers the dialog above on next start) and
        # dump the traceback both to stderr and to a log file.
        CONF.set('main', 'crash', True)
        import traceback
        traceback.print_exc(file=STDERR)
        traceback.print_exc(file=open('spyder_crash.log', 'w'))
    if mainwindow is None:
        # An exception occured
        if SPLASH is not None:
            SPLASH.hide()
        return
    ORIGINAL_SYS_EXIT()
constant[Main function]
if call[name[running_under_pytest], parameter[]] begin[:]
<ast.Try object at 0x7da207f03cd0>
variable[options] assign[=] call[name[Mock], parameter[]]
name[options].working_directory assign[=] constant[None]
name[options].profile assign[=] constant[False]
name[options].multithreaded assign[=] constant[False]
name[options].new_instance assign[=] constant[False]
name[options].project assign[=] constant[None]
name[options].window_title assign[=] constant[None]
name[options].opengl_implementation assign[=] constant[None]
name[options].debug_info assign[=] constant[None]
name[options].debug_output assign[=] constant[None]
if compare[call[name[CONF].get, parameter[constant[main], constant[opengl]]] not_equal[!=] constant[automatic]] begin[:]
variable[option] assign[=] call[name[CONF].get, parameter[constant[main], constant[opengl]]]
call[name[set_opengl_implementation], parameter[name[option]]]
variable[app] assign[=] call[name[initialize], parameter[]]
variable[window] assign[=] call[name[run_spyder], parameter[name[app], name[options], constant[None]]]
return[name[window]]
<ast.Tuple object at 0x7da207f03af0> assign[=] call[name[get_options], parameter[]]
if name[options].opengl_implementation begin[:]
variable[option] assign[=] name[options].opengl_implementation
call[name[set_opengl_implementation], parameter[name[option]]]
if name[options].show_console begin[:]
call[name[print], parameter[constant[(Deprecated) --show console does nothing, now the default behavior is to show the console, use --hide-console if you want to hide it]]]
if compare[name[set_attached_console_visible] is_not constant[None]] begin[:]
call[name[set_attached_console_visible], parameter[<ast.BoolOp object at 0x7da207f010c0>]]
call[name[setup_logging], parameter[name[options]]]
variable[app] assign[=] call[name[initialize], parameter[]]
if name[options].reset_config_files begin[:]
call[name[reset_config_files], parameter[]]
return[None]
if <ast.BoolOp object at 0x7da207f03280> begin[:]
call[name[CONF].set, parameter[constant[main], constant[crash], constant[False]]]
if compare[name[SPLASH] is_not constant[None]] begin[:]
call[name[SPLASH].hide, parameter[]]
call[name[QMessageBox].information, parameter[constant[None], constant[Spyder], binary_operation[constant[Spyder crashed during last session.<br><br>If Spyder does not start at all and <u>before submitting a bug report</u>, please try to reset settings to defaults by running Spyder with the command line option '--reset':<br><span style='color: #555555'><b>spyder --reset</b></span><br><br><span style='color: #ff5555'><b>Warning:</b></span> this command will remove all your Spyder configuration files located in '%s').<br><br>If Spyder still fails to launch, you should consult our comprehensive <b><a href="%s">Troubleshooting Guide</a></b>, which when followed carefully solves the vast majority of crashes; also, take the time to search for <a href="%s">known bugs</a> or <a href="%s">discussions</a> matching your situation before submitting a report to our <a href="%s">issue tracker</a>. Your feedback will always be greatly appreciated.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da207f00070>, <ast.Name object at 0x7da207f01780>, <ast.Name object at 0x7da207f03640>, <ast.Name object at 0x7da207f020e0>, <ast.Name object at 0x7da207f038e0>]]]]]
variable[mainwindow] assign[=] constant[None]
<ast.Try object at 0x7da207f02470>
if compare[name[mainwindow] is constant[None]] begin[:]
if compare[name[SPLASH] is_not constant[None]] begin[:]
call[name[SPLASH].hide, parameter[]]
return[None]
call[name[ORIGINAL_SYS_EXIT], parameter[]] | keyword[def] identifier[main] ():
literal[string]
keyword[if] identifier[running_under_pytest] ():
keyword[try] :
keyword[from] identifier[unittest] . identifier[mock] keyword[import] identifier[Mock]
keyword[except] identifier[ImportError] :
keyword[from] identifier[mock] keyword[import] identifier[Mock]
identifier[options] = identifier[Mock] ()
identifier[options] . identifier[working_directory] = keyword[None]
identifier[options] . identifier[profile] = keyword[False]
identifier[options] . identifier[multithreaded] = keyword[False]
identifier[options] . identifier[new_instance] = keyword[False]
identifier[options] . identifier[project] = keyword[None]
identifier[options] . identifier[window_title] = keyword[None]
identifier[options] . identifier[opengl_implementation] = keyword[None]
identifier[options] . identifier[debug_info] = keyword[None]
identifier[options] . identifier[debug_output] = keyword[None]
keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] )!= literal[string] :
identifier[option] = identifier[CONF] . identifier[get] ( literal[string] , literal[string] )
identifier[set_opengl_implementation] ( identifier[option] )
identifier[app] = identifier[initialize] ()
identifier[window] = identifier[run_spyder] ( identifier[app] , identifier[options] , keyword[None] )
keyword[return] identifier[window]
identifier[options] , identifier[args] = identifier[get_options] ()
keyword[if] identifier[options] . identifier[opengl_implementation] :
identifier[option] = identifier[options] . identifier[opengl_implementation]
identifier[set_opengl_implementation] ( identifier[option] )
keyword[else] :
keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] )!= literal[string] :
identifier[option] = identifier[CONF] . identifier[get] ( literal[string] , literal[string] )
identifier[set_opengl_implementation] ( identifier[option] )
keyword[if] identifier[options] . identifier[show_console] :
identifier[print] ( literal[string]
literal[string]
literal[string] )
keyword[if] identifier[set_attached_console_visible] keyword[is] keyword[not] keyword[None] :
identifier[set_attached_console_visible] ( keyword[not] identifier[options] . identifier[hide_console]
keyword[or] identifier[options] . identifier[reset_config_files]
keyword[or] identifier[options] . identifier[reset_to_defaults]
keyword[or] identifier[options] . identifier[optimize]
keyword[or] identifier[bool] ( identifier[get_debug_level] ()))
identifier[setup_logging] ( identifier[options] )
identifier[app] = identifier[initialize] ()
keyword[if] identifier[options] . identifier[reset_config_files] :
identifier[reset_config_files] ()
keyword[return]
keyword[elif] identifier[options] . identifier[reset_to_defaults] :
identifier[CONF] . identifier[reset_to_defaults] ( identifier[save] = keyword[True] )
keyword[return]
keyword[elif] identifier[options] . identifier[optimize] :
keyword[import] identifier[spyder]
identifier[programs] . identifier[run_python_script] ( identifier[module] = literal[string] ,
identifier[args] =[ identifier[spyder] . identifier[__path__] [ literal[int] ]], identifier[p_args] =[ literal[string] ])
keyword[return]
keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] , keyword[False] ) keyword[and] keyword[not] identifier[DEV] :
identifier[CONF] . identifier[set] ( literal[string] , literal[string] , keyword[False] )
keyword[if] identifier[SPLASH] keyword[is] keyword[not] keyword[None] :
identifier[SPLASH] . identifier[hide] ()
identifier[QMessageBox] . identifier[information] (
keyword[None] , literal[string] ,
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] %( identifier[get_conf_path] (), identifier[__trouble_url__] , identifier[__project_url__] ,
identifier[__forum_url__] , identifier[__project_url__] ))
identifier[mainwindow] = keyword[None]
keyword[try] :
identifier[mainwindow] = identifier[run_spyder] ( identifier[app] , identifier[options] , identifier[args] )
keyword[except] identifier[FontError] keyword[as] identifier[fontError] :
identifier[QMessageBox] . identifier[information] ( keyword[None] , literal[string] ,
literal[string]
literal[string]
literal[string]
literal[string] )
identifier[CONF] . identifier[set] ( literal[string] , literal[string] , literal[string] )
keyword[except] identifier[BaseException] :
identifier[CONF] . identifier[set] ( literal[string] , literal[string] , keyword[True] )
keyword[import] identifier[traceback]
identifier[traceback] . identifier[print_exc] ( identifier[file] = identifier[STDERR] )
identifier[traceback] . identifier[print_exc] ( identifier[file] = identifier[open] ( literal[string] , literal[string] ))
keyword[if] identifier[mainwindow] keyword[is] keyword[None] :
keyword[if] identifier[SPLASH] keyword[is] keyword[not] keyword[None] :
identifier[SPLASH] . identifier[hide] ()
keyword[return]
identifier[ORIGINAL_SYS_EXIT] () | def main():
"""Main function""" # **** For Pytest ****
# We need to create MainWindow **here** to avoid passing pytest
# options to Spyder
if running_under_pytest():
try:
from unittest.mock import Mock # depends on [control=['try'], data=[]]
except ImportError:
from mock import Mock # Python 2 # depends on [control=['except'], data=[]]
options = Mock()
options.working_directory = None
options.profile = False
options.multithreaded = False
options.new_instance = False
options.project = None
options.window_title = None
options.opengl_implementation = None
options.debug_info = None
options.debug_output = None
if CONF.get('main', 'opengl') != 'automatic':
option = CONF.get('main', 'opengl')
set_opengl_implementation(option) # depends on [control=['if'], data=[]]
app = initialize()
window = run_spyder(app, options, None)
return window # depends on [control=['if'], data=[]] # **** Collect command line options ****
# Note regarding Options:
# It's important to collect options before monkey patching sys.exit,
# otherwise, argparse won't be able to exit if --help option is passed
(options, args) = get_options() # **** Set OpenGL implementation to use ****
if options.opengl_implementation:
option = options.opengl_implementation
set_opengl_implementation(option) # depends on [control=['if'], data=[]]
elif CONF.get('main', 'opengl') != 'automatic':
option = CONF.get('main', 'opengl')
set_opengl_implementation(option) # depends on [control=['if'], data=[]] # **** Handle hide_console option ****
if options.show_console:
print('(Deprecated) --show console does nothing, now the default behavior is to show the console, use --hide-console if you want to hide it') # depends on [control=['if'], data=[]]
if set_attached_console_visible is not None:
set_attached_console_visible(not options.hide_console or options.reset_config_files or options.reset_to_defaults or options.optimize or bool(get_debug_level())) # depends on [control=['if'], data=['set_attached_console_visible']] # **** Set debugging info ****
setup_logging(options) # **** Create the application ****
app = initialize() # **** Handle other options ****
if options.reset_config_files: # <!> Remove all configuration files!
reset_config_files()
return # depends on [control=['if'], data=[]]
elif options.reset_to_defaults: # Reset Spyder settings to defaults
CONF.reset_to_defaults(save=True)
return # depends on [control=['if'], data=[]]
elif options.optimize: # Optimize the whole Spyder's source code directory
import spyder
programs.run_python_script(module='compileall', args=[spyder.__path__[0]], p_args=['-O'])
return # depends on [control=['if'], data=[]] # **** Show crash dialog ****
if CONF.get('main', 'crash', False) and (not DEV):
CONF.set('main', 'crash', False)
if SPLASH is not None:
SPLASH.hide() # depends on [control=['if'], data=['SPLASH']]
QMessageBox.information(None, 'Spyder', 'Spyder crashed during last session.<br><br>If Spyder does not start at all and <u>before submitting a bug report</u>, please try to reset settings to defaults by running Spyder with the command line option \'--reset\':<br><span style=\'color: #555555\'><b>spyder --reset</b></span><br><br><span style=\'color: #ff5555\'><b>Warning:</b></span> this command will remove all your Spyder configuration files located in \'%s\').<br><br>If Spyder still fails to launch, you should consult our comprehensive <b><a href="%s">Troubleshooting Guide</a></b>, which when followed carefully solves the vast majority of crashes; also, take the time to search for <a href="%s">known bugs</a> or <a href="%s">discussions</a> matching your situation before submitting a report to our <a href="%s">issue tracker</a>. Your feedback will always be greatly appreciated.' % (get_conf_path(), __trouble_url__, __project_url__, __forum_url__, __project_url__)) # depends on [control=['if'], data=[]] # **** Create main window ****
mainwindow = None
try:
mainwindow = run_spyder(app, options, args) # depends on [control=['try'], data=[]]
except FontError as fontError:
QMessageBox.information(None, 'Spyder', "Spyder was unable to load the <i>Spyder 3</i> icon theme. That's why it's going to fallback to the theme used in Spyder 2.<br><br>For that, please close this window and start Spyder again.")
CONF.set('appearance', 'icon_theme', 'spyder 2') # depends on [control=['except'], data=[]]
except BaseException:
CONF.set('main', 'crash', True)
import traceback
traceback.print_exc(file=STDERR)
traceback.print_exc(file=open('spyder_crash.log', 'w')) # depends on [control=['except'], data=[]]
if mainwindow is None: # An exception occured
if SPLASH is not None:
SPLASH.hide() # depends on [control=['if'], data=['SPLASH']]
return # depends on [control=['if'], data=[]]
ORIGINAL_SYS_EXIT() |
def cond_entropy(x, y, bins_y=None, bins_xy=None, method='nearest-neighbors', units='bits'):
    """Compute the conditional entropy H(X|Y) = H(X,Y) - H(Y).

    method: one of 'nearest-neighbors', 'gaussian', or 'bin'.
    When method is 'bin', both bins_y and bins_xy must be provided.
    units: 'bits' or 'nats'.
    """
    # Joint entropy over the concatenated (x, y) samples, columns side by side.
    joint = np.concatenate([x, y], axis=1)
    joint_entropy = entropy(data=joint, bins=bins_xy, method=method, units=units)
    # Marginal entropy of the conditioning variable y.
    marginal_entropy = entropy(data=y, bins=bins_y, method=method, units=units)
    return joint_entropy - marginal_entropy
constant[
compute the conditional entropy H(X|Y).
method: 'nearest-neighbors', 'gaussian', or 'bin'
if 'bin' need to provide bins_y, and bins_xy
units: 'bits' or 'nats'
]
variable[HXY] assign[=] call[name[entropy], parameter[]]
variable[HY] assign[=] call[name[entropy], parameter[]]
return[binary_operation[name[HXY] - name[HY]]] | keyword[def] identifier[cond_entropy] ( identifier[x] , identifier[y] , identifier[bins_y] = keyword[None] , identifier[bins_xy] = keyword[None] , identifier[method] = literal[string] , identifier[units] = literal[string] ):
literal[string]
identifier[HXY] = identifier[entropy] ( identifier[data] = identifier[np] . identifier[concatenate] ([ identifier[x] , identifier[y] ], identifier[axis] = literal[int] ), identifier[bins] = identifier[bins_xy] , identifier[method] = identifier[method] , identifier[units] = identifier[units] )
identifier[HY] = identifier[entropy] ( identifier[data] = identifier[y] , identifier[bins] = identifier[bins_y] , identifier[method] = identifier[method] , identifier[units] = identifier[units] )
keyword[return] identifier[HXY] - identifier[HY] | def cond_entropy(x, y, bins_y=None, bins_xy=None, method='nearest-neighbors', units='bits'):
"""
compute the conditional entropy H(X|Y).
method: 'nearest-neighbors', 'gaussian', or 'bin'
if 'bin' need to provide bins_y, and bins_xy
units: 'bits' or 'nats'
"""
HXY = entropy(data=np.concatenate([x, y], axis=1), bins=bins_xy, method=method, units=units)
HY = entropy(data=y, bins=bins_y, method=method, units=units)
return HXY - HY |
def list_file(self, commit, path, recursive=False):
    """
    Lists the files in a directory.
    Params:
    * commit: A tuple, string, or Commit object representing the commit.
    * path: The path to the directory.
    * recursive: If True, continue listing the files for sub-directories.
    """
    request = proto.ListFileRequest(
        file=proto.File(commit=commit_from(commit), path=path)
    )
    response = self.stub.ListFile(request, metadata=self.metadata)
    infos = response.file_info
    if not recursive:
        return list(infos)
    # Plain files first, then the recursive listing of each sub-directory,
    # preserving the directory order returned by the server.
    listing = [info for info in infos if info.file_type == proto.FILE]
    for info in infos:
        if info.file_type == proto.DIR:
            listing.extend(self.list_file(commit, info.file.path, recursive))
    return listing
constant[
Lists the files in a directory.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the directory.
* recursive: If True, continue listing the files for sub-directories.
]
variable[req] assign[=] call[name[proto].ListFileRequest, parameter[]]
variable[res] assign[=] call[name[self].stub.ListFile, parameter[name[req]]]
variable[file_infos] assign[=] name[res].file_info
if name[recursive] begin[:]
variable[dirs] assign[=] <ast.ListComp object at 0x7da204623ee0>
variable[files] assign[=] <ast.ListComp object at 0x7da204623f40>
return[call[name[sum], parameter[<ast.ListComp object at 0x7da204620100>, name[files]]]]
return[call[name[list], parameter[name[file_infos]]]] | keyword[def] identifier[list_file] ( identifier[self] , identifier[commit] , identifier[path] , identifier[recursive] = keyword[False] ):
literal[string]
identifier[req] = identifier[proto] . identifier[ListFileRequest] (
identifier[file] = identifier[proto] . identifier[File] ( identifier[commit] = identifier[commit_from] ( identifier[commit] ), identifier[path] = identifier[path] )
)
identifier[res] = identifier[self] . identifier[stub] . identifier[ListFile] ( identifier[req] , identifier[metadata] = identifier[self] . identifier[metadata] )
identifier[file_infos] = identifier[res] . identifier[file_info]
keyword[if] identifier[recursive] :
identifier[dirs] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[file_infos] keyword[if] identifier[f] . identifier[file_type] == identifier[proto] . identifier[DIR] ]
identifier[files] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[file_infos] keyword[if] identifier[f] . identifier[file_type] == identifier[proto] . identifier[FILE] ]
keyword[return] identifier[sum] ([ identifier[self] . identifier[list_file] ( identifier[commit] , identifier[d] . identifier[file] . identifier[path] , identifier[recursive] ) keyword[for] identifier[d] keyword[in] identifier[dirs] ], identifier[files] )
keyword[return] identifier[list] ( identifier[file_infos] ) | def list_file(self, commit, path, recursive=False):
"""
Lists the files in a directory.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the directory.
* recursive: If True, continue listing the files for sub-directories.
"""
req = proto.ListFileRequest(file=proto.File(commit=commit_from(commit), path=path))
res = self.stub.ListFile(req, metadata=self.metadata)
file_infos = res.file_info
if recursive:
dirs = [f for f in file_infos if f.file_type == proto.DIR]
files = [f for f in file_infos if f.file_type == proto.FILE]
return sum([self.list_file(commit, d.file.path, recursive) for d in dirs], files) # depends on [control=['if'], data=[]]
return list(file_infos) |
def set_exception(self, exception):
    """Sets the exception on the future."""
    # Only a finished transfer may have its result overridden by an exception.
    if self.done():
        self._coordinator.set_exception(exception, override=True)
    else:
        raise TransferNotDoneError(
            'set_exception can only be called once the transfer is '
            'complete.')
constant[Sets the exception on the future.]
if <ast.UnaryOp object at 0x7da2041dae60> begin[:]
<ast.Raise object at 0x7da2041dbee0>
call[name[self]._coordinator.set_exception, parameter[name[exception]]] | keyword[def] identifier[set_exception] ( identifier[self] , identifier[exception] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[done] ():
keyword[raise] identifier[TransferNotDoneError] (
literal[string]
literal[string] )
identifier[self] . identifier[_coordinator] . identifier[set_exception] ( identifier[exception] , identifier[override] = keyword[True] ) | def set_exception(self, exception):
"""Sets the exception on the future."""
if not self.done():
raise TransferNotDoneError('set_exception can only be called once the transfer is complete.') # depends on [control=['if'], data=[]]
self._coordinator.set_exception(exception, override=True) |
def _generate_feed(self, feed_data):
    """ render feed file with data
    """
    # Render the Atom template and write it as UTF-8 under ./public/.
    rendered = self._render_html('atom.xml', feed_data)
    destination = os.path.join(os.getcwd(), 'public', 'atom.xml')
    with codecs.open(destination, 'wb', 'utf-8') as feed_file:
        feed_file.write(rendered)
constant[ render feed file with data
]
variable[atom_feed] assign[=] call[name[self]._render_html, parameter[constant[atom.xml], name[feed_data]]]
variable[feed_path] assign[=] call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], constant[public], constant[atom.xml]]]
with call[name[codecs].open, parameter[name[feed_path], constant[wb], constant[utf-8]]] begin[:]
call[name[f].write, parameter[name[atom_feed]]] | keyword[def] identifier[_generate_feed] ( identifier[self] , identifier[feed_data] ):
literal[string]
identifier[atom_feed] = identifier[self] . identifier[_render_html] ( literal[string] , identifier[feed_data] )
identifier[feed_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), literal[string] , literal[string] )
keyword[with] identifier[codecs] . identifier[open] ( identifier[feed_path] , literal[string] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[atom_feed] ) | def _generate_feed(self, feed_data):
""" render feed file with data
"""
atom_feed = self._render_html('atom.xml', feed_data)
feed_path = os.path.join(os.getcwd(), 'public', 'atom.xml')
with codecs.open(feed_path, 'wb', 'utf-8') as f:
f.write(atom_feed) # depends on [control=['with'], data=['f']] |
def cmd(send, msg, args):
    """Converts text into NATO form.
    Syntax: {command} <text>
    """
    # Nothing to convert.
    if not msg:
        send("NATO what?")
        return
    phonetic = gen_nato(msg)
    # Refuse overly long phonetic output.
    if len(phonetic) > 100:
        send("Your NATO is too long. Have you considered letters?")
        return
    send(phonetic)
constant[Converts text into NATO form.
Syntax: {command} <text>
]
if <ast.UnaryOp object at 0x7da1b20d4550> begin[:]
call[name[send], parameter[constant[NATO what?]]]
return[None]
variable[nato] assign[=] call[name[gen_nato], parameter[name[msg]]]
if compare[call[name[len], parameter[name[nato]]] greater[>] constant[100]] begin[:]
call[name[send], parameter[constant[Your NATO is too long. Have you considered letters?]]] | keyword[def] identifier[cmd] ( identifier[send] , identifier[msg] , identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[msg] :
identifier[send] ( literal[string] )
keyword[return]
identifier[nato] = identifier[gen_nato] ( identifier[msg] )
keyword[if] identifier[len] ( identifier[nato] )> literal[int] :
identifier[send] ( literal[string] )
keyword[else] :
identifier[send] ( identifier[nato] ) | def cmd(send, msg, args):
"""Converts text into NATO form.
Syntax: {command} <text>
"""
if not msg:
send('NATO what?')
return # depends on [control=['if'], data=[]]
nato = gen_nato(msg)
if len(nato) > 100:
send('Your NATO is too long. Have you considered letters?') # depends on [control=['if'], data=[]]
else:
send(nato) |
def domain(host, port, username, password, avail_timeout=240.):
    """
    Endpoint domain.
    :param str host:
        Endpoint host.
    :param int port:
        Endpoint port.
    :param str username:
        Endpoint username.
    :param str password:
        Endpoint password.
    :param float avail_timeout:
        Availability check timeout in seconds.
    :rtype:
        Domain
    :return:
        Domain for the given endpoint parameters.
    """
    endpoint = Endpoint(host, port, username, password)
    return Domain(endpoint, avail_timeout)
constant[
Endpoint domain.
:param str host:
Endpoint host.
:param int port:
Endpoint port.
:param str username:
Endpoint username.
:param str password:
Endpoint password.
:param float avail_timeout:
Availability check timeout in seconds.
:rtype:
Domain
:return:
Domain for the given endpoint parameters.
]
return[call[name[Domain], parameter[call[name[Endpoint], parameter[name[host], name[port], name[username], name[password]]], name[avail_timeout]]]] | keyword[def] identifier[domain] ( identifier[host] , identifier[port] , identifier[username] , identifier[password] , identifier[avail_timeout] = literal[int] ):
literal[string]
keyword[return] identifier[Domain] ( identifier[Endpoint] ( identifier[host] , identifier[port] , identifier[username] , identifier[password] ), identifier[avail_timeout] ) | def domain(host, port, username, password, avail_timeout=240.0):
"""
Endpoint domain.
:param str host:
Endpoint host.
:param int port:
Endpoint port.
:param str username:
Endpoint username.
:param str password:
Endpoint password.
:param float avail_timeout:
Availability check timeout in seconds.
:rtype:
Domain
:return:
Domain for the given endpoint parameters.
"""
return Domain(Endpoint(host, port, username, password), avail_timeout) |
def update_empty_fields(self, **kwargs):
    """Updates the field of info about an OTU that might not be filled in by a match_names or taxon call.

    Each attribute is written only when it is currently unset (None, falsy,
    or the shared empty-tuple sentinel), so information obtained from a
    previous service call is never clobbered by this merge.
    """
    if self._is_deprecated is None:
        self._is_deprecated = kwargs.get('is_deprecated')
    if self._is_dubious is None:
        self._is_dubious = kwargs.get('is_dubious')
    if self._is_synonym is None:
        self._is_synonym = kwargs.get('is_synonym')
    if self._synonyms is _EMPTY_TUPLE:
        self._synonyms = kwargs.get('synonyms')
        # Normalize a missing synonyms payload back to the sentinel.
        if self._synonyms is None:
            self._synonyms = _EMPTY_TUPLE
    if self.rank is None:
        self._rank = kwargs.get('rank')
    # BUG FIX: the original tested `if self._nomenclature_code:`, which
    # overwrote an already-known code and never filled in a missing one —
    # the inverse of every other check in this method.
    if not self._nomenclature_code:
        self._nomenclature_code = kwargs.get('nomenclature_code')
    if not self._unique_name:
        self._unique_name = kwargs.get('unique_name')
    if self._taxonomic_lineage is None:
        self._taxonomic_lineage = kwargs.get('taxonomic_lineage')
    if self._parent is None:
        self._parent = kwargs.get('parent')
    # Derive the parent from the lineage when it was not supplied directly
    # and a taxomachine wrapper is available to resolve it.
    if self._parent is None and self._taxomachine_wrapper is not None and self._taxonomic_lineage:
        self._fill_parent_attr()
constant[Updates the field of info about an OTU that might not be filled in by a match_names or taxon call.]
if compare[name[self]._is_deprecated is constant[None]] begin[:]
name[self]._is_deprecated assign[=] call[name[kwargs].get, parameter[constant[is_deprecated]]]
if compare[name[self]._is_dubious is constant[None]] begin[:]
name[self]._is_dubious assign[=] call[name[kwargs].get, parameter[constant[is_dubious]]]
if compare[name[self]._is_synonym is constant[None]] begin[:]
name[self]._is_synonym assign[=] call[name[kwargs].get, parameter[constant[is_synonym]]]
if compare[name[self]._synonyms is name[_EMPTY_TUPLE]] begin[:]
name[self]._synonyms assign[=] call[name[kwargs].get, parameter[constant[synonyms]]]
if compare[name[self]._synonyms is constant[None]] begin[:]
name[self]._synonyms assign[=] name[_EMPTY_TUPLE]
if compare[name[self].rank is constant[None]] begin[:]
name[self]._rank assign[=] call[name[kwargs].get, parameter[constant[rank]]]
if name[self]._nomenclature_code begin[:]
name[self]._nomenclature_code assign[=] call[name[kwargs].get, parameter[constant[nomenclature_code]]]
if <ast.UnaryOp object at 0x7da18ede7bb0> begin[:]
name[self]._unique_name assign[=] call[name[kwargs].get, parameter[constant[unique_name]]]
if compare[name[self]._taxonomic_lineage is constant[None]] begin[:]
name[self]._taxonomic_lineage assign[=] call[name[kwargs].get, parameter[constant[taxonomic_lineage]]]
if compare[name[self]._parent is constant[None]] begin[:]
name[self]._parent assign[=] call[name[kwargs].get, parameter[constant[parent]]]
if <ast.BoolOp object at 0x7da18ede6a40> begin[:]
call[name[self]._fill_parent_attr, parameter[]] | keyword[def] identifier[update_empty_fields] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[_is_deprecated] keyword[is] keyword[None] :
identifier[self] . identifier[_is_deprecated] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_is_dubious] keyword[is] keyword[None] :
identifier[self] . identifier[_is_dubious] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_is_synonym] keyword[is] keyword[None] :
identifier[self] . identifier[_is_synonym] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_synonyms] keyword[is] identifier[_EMPTY_TUPLE] :
identifier[self] . identifier[_synonyms] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_synonyms] keyword[is] keyword[None] :
identifier[self] . identifier[_synonyms] = identifier[_EMPTY_TUPLE]
keyword[if] identifier[self] . identifier[rank] keyword[is] keyword[None] :
identifier[self] . identifier[_rank] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_nomenclature_code] :
identifier[self] . identifier[_nomenclature_code] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[_unique_name] :
identifier[self] . identifier[_unique_name] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_taxonomic_lineage] keyword[is] keyword[None] :
identifier[self] . identifier[_taxonomic_lineage] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_parent] keyword[is] keyword[None] :
identifier[self] . identifier[_parent] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_parent] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[_taxomachine_wrapper] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[_taxonomic_lineage] :
identifier[self] . identifier[_fill_parent_attr] () | def update_empty_fields(self, **kwargs):
"""Updates the field of info about an OTU that might not be filled in by a match_names or taxon call."""
if self._is_deprecated is None:
self._is_deprecated = kwargs.get('is_deprecated') # depends on [control=['if'], data=[]]
if self._is_dubious is None:
self._is_dubious = kwargs.get('is_dubious') # depends on [control=['if'], data=[]]
if self._is_synonym is None:
self._is_synonym = kwargs.get('is_synonym') # depends on [control=['if'], data=[]]
if self._synonyms is _EMPTY_TUPLE:
self._synonyms = kwargs.get('synonyms')
if self._synonyms is None:
self._synonyms = _EMPTY_TUPLE # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['_EMPTY_TUPLE']]
if self.rank is None:
self._rank = kwargs.get('rank') # depends on [control=['if'], data=[]]
if self._nomenclature_code:
self._nomenclature_code = kwargs.get('nomenclature_code') # depends on [control=['if'], data=[]]
if not self._unique_name:
self._unique_name = kwargs.get('unique_name') # depends on [control=['if'], data=[]]
if self._taxonomic_lineage is None:
self._taxonomic_lineage = kwargs.get('taxonomic_lineage') # depends on [control=['if'], data=[]]
if self._parent is None:
self._parent = kwargs.get('parent')
if self._parent is None and self._taxomachine_wrapper is not None and self._taxonomic_lineage:
self._fill_parent_attr() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def get_series_episode(series_id, season, episode):
    """Fetch one episode of a series from the TVDB client.

    :param int series_id: id of the series.
    :param int season: season number of the episode.
    :param int episode: episode number of the episode.
    :return: the episode data, or None when no match is found.
    :rtype: dict
    """
    matches = tvdb_client.query_series_episodes(
        series_id, aired_season=season, aired_episode=episode)
    if not matches:
        # No result for that season/episode pair.
        return None
    return tvdb_client.get_episode(matches['data'][0]['id'])
constant[Get an episode of a series.
:param int series_id: id of the series.
:param int season: season number of the episode.
:param int episode: episode number of the episode.
:return: the episode data.
:rtype: dict
]
variable[result] assign[=] call[name[tvdb_client].query_series_episodes, parameter[name[series_id]]]
if name[result] begin[:]
return[call[name[tvdb_client].get_episode, parameter[call[call[call[name[result]][constant[data]]][constant[0]]][constant[id]]]]] | keyword[def] identifier[get_series_episode] ( identifier[series_id] , identifier[season] , identifier[episode] ):
literal[string]
identifier[result] = identifier[tvdb_client] . identifier[query_series_episodes] ( identifier[series_id] , identifier[aired_season] = identifier[season] , identifier[aired_episode] = identifier[episode] )
keyword[if] identifier[result] :
keyword[return] identifier[tvdb_client] . identifier[get_episode] ( identifier[result] [ literal[string] ][ literal[int] ][ literal[string] ]) | def get_series_episode(series_id, season, episode):
"""Get an episode of a series.
:param int series_id: id of the series.
:param int season: season number of the episode.
:param int episode: episode number of the episode.
:return: the episode data.
:rtype: dict
"""
result = tvdb_client.query_series_episodes(series_id, aired_season=season, aired_episode=episode)
if result:
return tvdb_client.get_episode(result['data'][0]['id']) # depends on [control=['if'], data=[]] |
def _run_raw(self, cmd: str, ignore_errors=False) -> Tuple[str, str]:
    """Execute *cmd* over the current SSH session and wait for completion.

    Meant for short commands during initialization because it blocks and
    has minimal dependencies (no tmux).

    :param cmd: shell command to run on the remote host.
    :param ignore_errors: when True, a non-zero exit status is tolerated
        instead of aborting.
    :return: (stdout, stderr) decoded as strings.
    """
    _, out_stream, err_stream = u.call_with_retries(
        self.ssh_client.exec_command, command=cmd, get_pty=True)
    out_text = out_stream.read().decode()
    err_text = err_stream.read().decode()
    # Non-zero exit aborts unless the caller opted out.
    if out_stream.channel.recv_exit_status() != 0 and not ignore_errors:
        self.log(f"command ({cmd}) failed with --->")
        self.log("failing stdout: " + out_text)
        self.log("failing stderr: " + err_text)
        assert False, "_run_raw failed (see logs for error)"
    return out_text, err_text
constant[Runs given cmd in the task using current SSH session, returns
stdout/stderr as strings. Because it blocks until cmd is done, use it for
short cmds. Silently ignores failing commands.
This is a barebones method to be used during initialization that have
minimal dependencies (no tmux)
]
<ast.Tuple object at 0x7da18ede5420> assign[=] call[name[u].call_with_retries, parameter[name[self].ssh_client.exec_command]]
variable[stdout_str] assign[=] call[call[name[stdout].read, parameter[]].decode, parameter[]]
variable[stderr_str] assign[=] call[call[name[stderr].read, parameter[]].decode, parameter[]]
if compare[call[name[stdout].channel.recv_exit_status, parameter[]] not_equal[!=] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da18ede4af0> begin[:]
call[name[self].log, parameter[<ast.JoinedStr object at 0x7da18ede41f0>]]
call[name[self].log, parameter[binary_operation[constant[failing stdout: ] + name[stdout_str]]]]
call[name[self].log, parameter[binary_operation[constant[failing stderr: ] + name[stderr_str]]]]
assert[constant[False]]
return[tuple[[<ast.Name object at 0x7da18ede50f0>, <ast.Name object at 0x7da18ede48b0>]]] | keyword[def] identifier[_run_raw] ( identifier[self] , identifier[cmd] : identifier[str] , identifier[ignore_errors] = keyword[False] )-> identifier[Tuple] [ identifier[str] , identifier[str] ]:
literal[string]
identifier[stdin] , identifier[stdout] , identifier[stderr] = identifier[u] . identifier[call_with_retries] ( identifier[self] . identifier[ssh_client] . identifier[exec_command] ,
identifier[command] = identifier[cmd] , identifier[get_pty] = keyword[True] )
identifier[stdout_str] = identifier[stdout] . identifier[read] (). identifier[decode] ()
identifier[stderr_str] = identifier[stderr] . identifier[read] (). identifier[decode] ()
keyword[if] identifier[stdout] . identifier[channel] . identifier[recv_exit_status] ()!= literal[int] :
keyword[if] keyword[not] identifier[ignore_errors] :
identifier[self] . identifier[log] ( literal[string] )
identifier[self] . identifier[log] ( literal[string] + identifier[stdout_str] )
identifier[self] . identifier[log] ( literal[string] + identifier[stderr_str] )
keyword[assert] keyword[False] , literal[string]
keyword[return] identifier[stdout_str] , identifier[stderr_str] | def _run_raw(self, cmd: str, ignore_errors=False) -> Tuple[str, str]:
"""Runs given cmd in the task using current SSH session, returns
stdout/stderr as strings. Because it blocks until cmd is done, use it for
short cmds. Silently ignores failing commands.
This is a barebones method to be used during initialization that have
minimal dependencies (no tmux)
"""
# self._log("run_ssh: %s"%(cmd,))
(stdin, stdout, stderr) = u.call_with_retries(self.ssh_client.exec_command, command=cmd, get_pty=True)
stdout_str = stdout.read().decode()
stderr_str = stderr.read().decode()
if stdout.channel.recv_exit_status() != 0:
if not ignore_errors:
self.log(f'command ({cmd}) failed with --->')
self.log('failing stdout: ' + stdout_str)
self.log('failing stderr: ' + stderr_str)
assert False, '_run_raw failed (see logs for error)' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return (stdout_str, stderr_str) |
def _iter_response(self, url, params=None):
    """Generator that walks every page of a multi-page API request.

    Mutates ``params`` in place to advance the ``page_number`` cursor,
    matching the original contract for callers that pass their own dict.
    """
    if params is None:
        params = {}
    params['page_number'] = 1
    while True:
        page = self._request(url, params)
        yield from page['result_data']
        # The final page reports itself as its own successor.
        if page['service_meta']['next_page_number'] == params['page_number']:
            return
        params['page_number'] += 1
constant[Return an enumerable that iterates through a multi-page API request]
if compare[name[params] is constant[None]] begin[:]
variable[params] assign[=] dictionary[[], []]
call[name[params]][constant[page_number]] assign[=] constant[1]
while constant[True] begin[:]
variable[response] assign[=] call[name[self]._request, parameter[name[url], name[params]]]
for taget[name[item]] in starred[call[name[response]][constant[result_data]]] begin[:]
<ast.Yield object at 0x7da2044c2a10>
if compare[call[call[name[response]][constant[service_meta]]][constant[next_page_number]] equal[==] call[name[params]][constant[page_number]]] begin[:]
break
<ast.AugAssign object at 0x7da2054a7cd0> | keyword[def] identifier[_iter_response] ( identifier[self] , identifier[url] , identifier[params] = keyword[None] ):
literal[string]
keyword[if] identifier[params] keyword[is] keyword[None] :
identifier[params] ={}
identifier[params] [ literal[string] ]= literal[int]
keyword[while] keyword[True] :
identifier[response] = identifier[self] . identifier[_request] ( identifier[url] , identifier[params] )
keyword[for] identifier[item] keyword[in] identifier[response] [ literal[string] ]:
keyword[yield] identifier[item]
keyword[if] identifier[response] [ literal[string] ][ literal[string] ]== identifier[params] [ literal[string] ]:
keyword[break]
identifier[params] [ literal[string] ]+= literal[int] | def _iter_response(self, url, params=None):
"""Return an enumerable that iterates through a multi-page API request"""
if params is None:
params = {} # depends on [control=['if'], data=['params']]
params['page_number'] = 1
# Last page lists itself as next page
while True:
response = self._request(url, params)
for item in response['result_data']:
yield item # depends on [control=['for'], data=['item']]
# Last page lists itself as next page
if response['service_meta']['next_page_number'] == params['page_number']:
break # depends on [control=['if'], data=[]]
params['page_number'] += 1 # depends on [control=['while'], data=[]] |
def DeleteJob(self, job_id, token=None):
    """Deletes the cron job identified by *job_id* from the AFF4 store."""
    aff4.FACTORY.Delete(self.CRON_JOBS_PATH.Add(job_id), token=token)
constant[Deletes cron job with the given URN.]
variable[job_urn] assign[=] call[name[self].CRON_JOBS_PATH.Add, parameter[name[job_id]]]
call[name[aff4].FACTORY.Delete, parameter[name[job_urn]]] | keyword[def] identifier[DeleteJob] ( identifier[self] , identifier[job_id] , identifier[token] = keyword[None] ):
literal[string]
identifier[job_urn] = identifier[self] . identifier[CRON_JOBS_PATH] . identifier[Add] ( identifier[job_id] )
identifier[aff4] . identifier[FACTORY] . identifier[Delete] ( identifier[job_urn] , identifier[token] = identifier[token] ) | def DeleteJob(self, job_id, token=None):
"""Deletes cron job with the given URN."""
job_urn = self.CRON_JOBS_PATH.Add(job_id)
aff4.FACTORY.Delete(job_urn, token=token) |
def set_gcc():
    """
    Try to find and use GCC on OSX for OpenMP support.

    Scans the usual MacPorts and Homebrew install locations and, on
    Darwin, exports the newest GCC found via the ``CC`` environment
    variable.  Does nothing on other platforms.

    Raises:
        Exception: on Darwin when no matching GCC binary is found.
    """
    import re

    # For macports and homebrew
    patterns = ['/opt/local/bin/gcc-mp-[0-9].[0-9]',
                '/opt/local/bin/gcc-mp-[0-9]',
                '/usr/local/bin/gcc-[0-9].[0-9]',
                '/usr/local/bin/gcc-[0-9]']
    if 'darwin' in platform.platform().lower():
        gcc_binaries = []
        for pattern in patterns:
            gcc_binaries += glob.glob(pattern)
        if gcc_binaries:
            # Pick the highest GCC *version*.  The previous lexicographic
            # sort of full paths let the install prefix decide instead
            # ('/usr/local/bin/gcc-5' sorted after '/opt/local/bin/gcc-mp-7').
            def version_key(path):
                return tuple(int(n) for n in
                             re.findall(r'\d+', os.path.basename(path)))
            _, gcc = os.path.split(max(gcc_binaries, key=version_key))
            os.environ["CC"] = gcc
        else:
            raise Exception('No GCC available. Install gcc from Homebrew '
                            'using brew install gcc.')
constant[
Try to find and use GCC on OSX for OpenMP support.
]
variable[patterns] assign[=] list[[<ast.Constant object at 0x7da1b1d5e230>, <ast.Constant object at 0x7da1b1d5d090>, <ast.Constant object at 0x7da1b1d5d870>, <ast.Constant object at 0x7da1b1d5cc10>]]
if compare[constant[darwin] in call[call[name[platform].platform, parameter[]].lower, parameter[]]] begin[:]
variable[gcc_binaries] assign[=] list[[]]
for taget[name[pattern]] in starred[name[patterns]] begin[:]
<ast.AugAssign object at 0x7da1b1d8b820>
call[name[gcc_binaries].sort, parameter[]]
if name[gcc_binaries] begin[:]
<ast.Tuple object at 0x7da1b1d8be50> assign[=] call[name[os].path.split, parameter[call[name[gcc_binaries]][<ast.UnaryOp object at 0x7da1b1d8b1f0>]]]
call[name[os].environ][constant[CC]] assign[=] name[gcc] | keyword[def] identifier[set_gcc] ():
literal[string]
identifier[patterns] =[ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
keyword[if] literal[string] keyword[in] identifier[platform] . identifier[platform] (). identifier[lower] ():
identifier[gcc_binaries] =[]
keyword[for] identifier[pattern] keyword[in] identifier[patterns] :
identifier[gcc_binaries] += identifier[glob] . identifier[glob] ( identifier[pattern] )
identifier[gcc_binaries] . identifier[sort] ()
keyword[if] identifier[gcc_binaries] :
identifier[_] , identifier[gcc] = identifier[os] . identifier[path] . identifier[split] ( identifier[gcc_binaries] [- literal[int] ])
identifier[os] . identifier[environ] [ literal[string] ]= identifier[gcc]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string]
literal[string] ) | def set_gcc():
"""
Try to find and use GCC on OSX for OpenMP support.
"""
# For macports and homebrew
patterns = ['/opt/local/bin/gcc-mp-[0-9].[0-9]', '/opt/local/bin/gcc-mp-[0-9]', '/usr/local/bin/gcc-[0-9].[0-9]', '/usr/local/bin/gcc-[0-9]']
if 'darwin' in platform.platform().lower():
gcc_binaries = []
for pattern in patterns:
gcc_binaries += glob.glob(pattern) # depends on [control=['for'], data=['pattern']]
gcc_binaries.sort()
if gcc_binaries:
(_, gcc) = os.path.split(gcc_binaries[-1])
os.environ['CC'] = gcc # depends on [control=['if'], data=[]]
else:
raise Exception('No GCC available. Install gcc from Homebrew using brew install gcc.') # depends on [control=['if'], data=[]] |
def parse_band_log(self, message):
    """Process incoming logging messages from the service."""
    if "payload" not in message or not hasattr(message["payload"], "name"):
        # Malformed message: report whatever can be shown about it.
        self.log.warning(
            "Received broken record on log band\n" + "Message: %s\nRecord: %s",
            str(message),
            str(
                hasattr(message.get("payload"), "__dict__")
                and message["payload"].__dict__
            ),
        )
        return
    record = message["payload"]
    prefix = "workflows_exc_"
    # Unwrap prefixed attributes back onto the record under their bare names.
    for attr in dir(record):
        if attr.startswith(prefix):
            setattr(record, attr[len(prefix):], getattr(record, attr))
            delattr(record, attr)
    # Annotate the record with the current service status.
    for key, value in self.get_status().items():
        setattr(record, "workflows_" + key, value)
    logging.getLogger(record.name).handle(record)
constant[Process incoming logging messages from the service.]
if <ast.BoolOp object at 0x7da18c4cc850> begin[:]
variable[record] assign[=] call[name[message]][constant[payload]]
for taget[name[k]] in starred[call[name[dir], parameter[name[record]]]] begin[:]
if call[name[k].startswith, parameter[constant[workflows_exc_]]] begin[:]
call[name[setattr], parameter[name[record], call[name[k]][<ast.Slice object at 0x7da18c4cd270>], call[name[getattr], parameter[name[record], name[k]]]]]
call[name[delattr], parameter[name[record], name[k]]]
for taget[tuple[[<ast.Name object at 0x7da18c4cf640>, <ast.Name object at 0x7da18c4cc5b0>]]] in starred[call[call[name[self].get_status, parameter[]].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[record], binary_operation[constant[workflows_] + name[k]], name[v]]]
call[call[name[logging].getLogger, parameter[name[record].name]].handle, parameter[name[record]]] | keyword[def] identifier[parse_band_log] ( identifier[self] , identifier[message] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[message] keyword[and] identifier[hasattr] ( identifier[message] [ literal[string] ], literal[string] ):
identifier[record] = identifier[message] [ literal[string] ]
keyword[for] identifier[k] keyword[in] identifier[dir] ( identifier[record] ):
keyword[if] identifier[k] . identifier[startswith] ( literal[string] ):
identifier[setattr] ( identifier[record] , identifier[k] [ literal[int] :], identifier[getattr] ( identifier[record] , identifier[k] ))
identifier[delattr] ( identifier[record] , identifier[k] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[get_status] (). identifier[items] ():
identifier[setattr] ( identifier[record] , literal[string] + identifier[k] , identifier[v] )
identifier[logging] . identifier[getLogger] ( identifier[record] . identifier[name] ). identifier[handle] ( identifier[record] )
keyword[else] :
identifier[self] . identifier[log] . identifier[warning] (
literal[string] + literal[string] ,
identifier[str] ( identifier[message] ),
identifier[str] (
identifier[hasattr] ( identifier[message] . identifier[get] ( literal[string] ), literal[string] )
keyword[and] identifier[message] [ literal[string] ]. identifier[__dict__]
),
) | def parse_band_log(self, message):
"""Process incoming logging messages from the service."""
if 'payload' in message and hasattr(message['payload'], 'name'):
record = message['payload']
for k in dir(record):
if k.startswith('workflows_exc_'):
setattr(record, k[14:], getattr(record, k))
delattr(record, k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
for (k, v) in self.get_status().items():
setattr(record, 'workflows_' + k, v) # depends on [control=['for'], data=[]]
logging.getLogger(record.name).handle(record) # depends on [control=['if'], data=[]]
else:
self.log.warning('Received broken record on log band\n' + 'Message: %s\nRecord: %s', str(message), str(hasattr(message.get('payload'), '__dict__') and message['payload'].__dict__)) |
def random_leaf(self):
    """Return a random leaf (variable with its associated weight).

    Retries up to ``self._number_tries_feasible_ind`` times before
    raising RuntimeError.
    """
    for _ in range(self._number_tries_feasible_ind):
        candidate = self._random_leaf(np.random.randint(self.nvar))
        if candidate is not None:
            return candidate
    raise RuntimeError("Could not find a suitable random leaf")
constant[Returns a random variable with the associated weight]
for taget[name[i]] in starred[call[name[range], parameter[name[self]._number_tries_feasible_ind]]] begin[:]
variable[var] assign[=] call[name[np].random.randint, parameter[name[self].nvar]]
variable[v] assign[=] call[name[self]._random_leaf, parameter[name[var]]]
if compare[name[v] is constant[None]] begin[:]
continue
return[name[v]]
<ast.Raise object at 0x7da1b0ea1300> | keyword[def] identifier[random_leaf] ( identifier[self] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_number_tries_feasible_ind] ):
identifier[var] = identifier[np] . identifier[random] . identifier[randint] ( identifier[self] . identifier[nvar] )
identifier[v] = identifier[self] . identifier[_random_leaf] ( identifier[var] )
keyword[if] identifier[v] keyword[is] keyword[None] :
keyword[continue]
keyword[return] identifier[v]
keyword[raise] identifier[RuntimeError] ( literal[string] ) | def random_leaf(self):
"""Returns a random variable with the associated weight"""
for i in range(self._number_tries_feasible_ind):
var = np.random.randint(self.nvar)
v = self._random_leaf(var)
if v is None:
continue # depends on [control=['if'], data=[]]
return v # depends on [control=['for'], data=[]]
raise RuntimeError('Could not find a suitable random leaf') |
def emit_var_assign(self, var, t):
    """Emit the intermediate code that stores value *t* into *var*.

    :param var: variable (node) being assigned to
    :param t: value to emit (e.g. a _label, a const, a tN...)
    """
    deref = '*' if var.byref else ''  # prefix marking indirect access
    # Skip stores into never-read variables at higher optimization levels.
    if self.O_LEVEL > 1 and not var.accessed:
        return
    if not var.type_.is_basic:
        raise NotImplementedError()
    if var.scope == SCOPE.global_:
        self.emit('store' + self.TSUFFIX(var.type_), var.mangled, t)
    elif var.scope == SCOPE.parameter:
        self.emit('pstore' + self.TSUFFIX(var.type_), deref + str(var.offset), t)
    elif var.scope == SCOPE.local:
        if var.alias is not None and var.alias.class_ == CLASS.array:
            # Aliased arrays keep a header before the data block; adjust
            # the offset past it (note: mutates var.offset in place).
            var.offset -= 1 + 2 * var.alias.count
        self.emit('pstore' + self.TSUFFIX(var.type_), deref + str(-var.offset), t)
constant[ Emits code for storing a value into a variable
:param var: variable (node) to be updated
:param t: the value to emmit (e.g. a _label, a const, a tN...)
]
variable[p] assign[=] <ast.IfExp object at 0x7da18fe93c40>
if <ast.BoolOp object at 0x7da18fe91390> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da2041d8eb0> begin[:]
<ast.Raise object at 0x7da2041d9c60>
if compare[name[var].scope equal[==] name[SCOPE].global_] begin[:]
call[name[self].emit, parameter[binary_operation[constant[store] + call[name[self].TSUFFIX, parameter[name[var].type_]]], name[var].mangled, name[t]]] | keyword[def] identifier[emit_var_assign] ( identifier[self] , identifier[var] , identifier[t] ):
literal[string]
identifier[p] = literal[string] keyword[if] identifier[var] . identifier[byref] keyword[else] literal[string]
keyword[if] identifier[self] . identifier[O_LEVEL] > literal[int] keyword[and] keyword[not] identifier[var] . identifier[accessed] :
keyword[return]
keyword[if] keyword[not] identifier[var] . identifier[type_] . identifier[is_basic] :
keyword[raise] identifier[NotImplementedError] ()
keyword[if] identifier[var] . identifier[scope] == identifier[SCOPE] . identifier[global_] :
identifier[self] . identifier[emit] ( literal[string] + identifier[self] . identifier[TSUFFIX] ( identifier[var] . identifier[type_] ), identifier[var] . identifier[mangled] , identifier[t] )
keyword[elif] identifier[var] . identifier[scope] == identifier[SCOPE] . identifier[parameter] :
identifier[self] . identifier[emit] ( literal[string] + identifier[self] . identifier[TSUFFIX] ( identifier[var] . identifier[type_] ), identifier[p] + identifier[str] ( identifier[var] . identifier[offset] ), identifier[t] )
keyword[elif] identifier[var] . identifier[scope] == identifier[SCOPE] . identifier[local] :
keyword[if] identifier[var] . identifier[alias] keyword[is] keyword[not] keyword[None] keyword[and] identifier[var] . identifier[alias] . identifier[class_] == identifier[CLASS] . identifier[array] :
identifier[var] . identifier[offset] -= literal[int] + literal[int] * identifier[var] . identifier[alias] . identifier[count]
identifier[self] . identifier[emit] ( literal[string] + identifier[self] . identifier[TSUFFIX] ( identifier[var] . identifier[type_] ), identifier[p] + identifier[str] (- identifier[var] . identifier[offset] ), identifier[t] ) | def emit_var_assign(self, var, t):
""" Emits code for storing a value into a variable
:param var: variable (node) to be updated
:param t: the value to emmit (e.g. a _label, a const, a tN...)
"""
p = '*' if var.byref else '' # Indirection prefix
if self.O_LEVEL > 1 and (not var.accessed):
return # depends on [control=['if'], data=[]]
if not var.type_.is_basic:
raise NotImplementedError() # depends on [control=['if'], data=[]]
if var.scope == SCOPE.global_:
self.emit('store' + self.TSUFFIX(var.type_), var.mangled, t) # depends on [control=['if'], data=[]]
elif var.scope == SCOPE.parameter:
self.emit('pstore' + self.TSUFFIX(var.type_), p + str(var.offset), t) # depends on [control=['if'], data=[]]
elif var.scope == SCOPE.local:
if var.alias is not None and var.alias.class_ == CLASS.array:
var.offset -= 1 + 2 * var.alias.count # depends on [control=['if'], data=[]]
self.emit('pstore' + self.TSUFFIX(var.type_), p + str(-var.offset), t) # depends on [control=['if'], data=[]] |
def strip_suffix(s, suffix, strict=False):
    """Return *s* with *suffix* removed when present.

    When the suffix is absent, the input is returned unchanged unless
    *strict* is True, in which case WimpyError is raised.
    """
    if not s.endswith(suffix):
        if strict:
            raise WimpyError("string doesn't end with suffix")
        return s
    # len-based slice keeps the empty-suffix case correct (s[:-0] would not).
    return s[: len(s) - len(suffix)]
constant[Removes the suffix, if it's there, otherwise returns input string unchanged.
If strict is True, also ensures the suffix was present]
if call[name[s].endswith, parameter[name[suffix]]] begin[:]
return[call[name[s]][<ast.Slice object at 0x7da20cabdfc0>]]
return[name[s]] | keyword[def] identifier[strip_suffix] ( identifier[s] , identifier[suffix] , identifier[strict] = keyword[False] ):
literal[string]
keyword[if] identifier[s] . identifier[endswith] ( identifier[suffix] ):
keyword[return] identifier[s] [: identifier[len] ( identifier[s] )- identifier[len] ( identifier[suffix] )]
keyword[elif] identifier[strict] :
keyword[raise] identifier[WimpyError] ( literal[string] )
keyword[return] identifier[s] | def strip_suffix(s, suffix, strict=False):
"""Removes the suffix, if it's there, otherwise returns input string unchanged.
If strict is True, also ensures the suffix was present"""
if s.endswith(suffix):
return s[:len(s) - len(suffix)] # depends on [control=['if'], data=[]]
elif strict:
raise WimpyError("string doesn't end with suffix") # depends on [control=['if'], data=[]]
return s |
def delete_loadbalancer(self, datacenter_id, loadbalancer_id):
    """
    Removes the load balancer from the data center.
    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``
    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``
    """
    endpoint = '/datacenters/%s/loadbalancers/%s' % (
        datacenter_id, loadbalancer_id)
    return self._perform_request(url=endpoint, method='DELETE')
constant[
Removes the load balancer from the data center.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param loadbalancer_id: The unique ID of the load balancer.
:type loadbalancer_id: ``str``
]
variable[response] assign[=] call[name[self]._perform_request, parameter[]]
return[name[response]] | keyword[def] identifier[delete_loadbalancer] ( identifier[self] , identifier[datacenter_id] , identifier[loadbalancer_id] ):
literal[string]
identifier[response] = identifier[self] . identifier[_perform_request] (
identifier[url] = literal[string] %(
identifier[datacenter_id] , identifier[loadbalancer_id] ), identifier[method] = literal[string] )
keyword[return] identifier[response] | def delete_loadbalancer(self, datacenter_id, loadbalancer_id):
"""
Removes the load balancer from the data center.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param loadbalancer_id: The unique ID of the load balancer.
:type loadbalancer_id: ``str``
"""
response = self._perform_request(url='/datacenters/%s/loadbalancers/%s' % (datacenter_id, loadbalancer_id), method='DELETE')
return response |
def nonzero_pixels(self):
    """ Return the coordinates of pixels with a positive channel sum.

    Returns
    -------
    :obj:`numpy.ndarray`
        Nx2 array of (row, col) indices of the nonzero pixels
    """
    rows, cols = np.where(np.sum(self.raw_data, axis=2) > 0)
    return np.c_[rows, cols]
constant[ Return an array of the nonzero pixels.
Returns
-------
:obj:`numpy.ndarray`
Nx2 array of the nonzero pixels
]
variable[nonzero_px] assign[=] call[name[np].where, parameter[compare[call[name[np].sum, parameter[name[self].raw_data]] greater[>] constant[0]]]]
variable[nonzero_px] assign[=] call[name[np].c_][tuple[[<ast.Subscript object at 0x7da1b04d7cd0>, <ast.Subscript object at 0x7da1b04d7e50>]]]
return[name[nonzero_px]] | keyword[def] identifier[nonzero_pixels] ( identifier[self] ):
literal[string]
identifier[nonzero_px] = identifier[np] . identifier[where] ( identifier[np] . identifier[sum] ( identifier[self] . identifier[raw_data] , identifier[axis] = literal[int] )> literal[int] )
identifier[nonzero_px] = identifier[np] . identifier[c_] [ identifier[nonzero_px] [ literal[int] ], identifier[nonzero_px] [ literal[int] ]]
keyword[return] identifier[nonzero_px] | def nonzero_pixels(self):
""" Return an array of the nonzero pixels.
Returns
-------
:obj:`numpy.ndarray`
Nx2 array of the nonzero pixels
"""
nonzero_px = np.where(np.sum(self.raw_data, axis=2) > 0)
nonzero_px = np.c_[nonzero_px[0], nonzero_px[1]]
return nonzero_px |
def _parse_tag(self, tag):
    """
    Parse a tag string (characters enclosed by []), including any
    options inside an opening tag, returning a tuple of the form:
    (valid, tag_name, closer, options)
    """
    malformed = (
        not tag.startswith(self.tag_opener)
        or not tag.endswith(self.tag_closer)
        or '\n' in tag
        or '\r' in tag
    )
    if malformed:
        return (False, tag, False, None)
    inner = tag[len(self.tag_opener):-len(self.tag_closer)].strip()
    if not inner:
        return (False, tag, False, None)
    closer = inner.startswith('/')
    if closer:
        inner = inner[1:]
    opts = {}
    # Only opening tags may carry options.
    if not closer and ('=' in inner or ' ' in inner):
        inner, opts = self._parse_opts(inner)
    return (True, inner.strip().lower(), closer, opts)
constant[
Given a tag string (characters enclosed by []), this function will
parse any options and return a tuple of the form:
(valid, tag_name, closer, options)
]
if <ast.BoolOp object at 0x7da1b05ef970> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b05ed8d0>, <ast.Name object at 0x7da1b05ed840>, <ast.Constant object at 0x7da1b05eceb0>, <ast.Constant object at 0x7da1b05ee260>]]]
variable[tag_name] assign[=] call[call[name[tag]][<ast.Slice object at 0x7da1b05ed990>].strip, parameter[]]
if <ast.UnaryOp object at 0x7da1b05eee60> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b05ee770>, <ast.Name object at 0x7da1b05ee530>, <ast.Constant object at 0x7da1b05ef9a0>, <ast.Constant object at 0x7da1b05ef580>]]]
variable[closer] assign[=] constant[False]
variable[opts] assign[=] dictionary[[], []]
if compare[call[name[tag_name]][constant[0]] equal[==] constant[/]] begin[:]
variable[tag_name] assign[=] call[name[tag_name]][<ast.Slice object at 0x7da1b05ef940>]
variable[closer] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b05ed6f0> begin[:]
<ast.Tuple object at 0x7da1b26ad9c0> assign[=] call[name[self]._parse_opts, parameter[name[tag_name]]]
return[tuple[[<ast.Constant object at 0x7da1b0507460>, <ast.Call object at 0x7da1b0505a50>, <ast.Name object at 0x7da1b05062c0>, <ast.Name object at 0x7da1b0506620>]]] | keyword[def] identifier[_parse_tag] ( identifier[self] , identifier[tag] ):
literal[string]
keyword[if] keyword[not] identifier[tag] . identifier[startswith] ( identifier[self] . identifier[tag_opener] ) keyword[or] keyword[not] identifier[tag] . identifier[endswith] ( identifier[self] . identifier[tag_closer] ) keyword[or] ( literal[string] keyword[in] identifier[tag] ) keyword[or] ( literal[string] keyword[in] identifier[tag] ):
keyword[return] ( keyword[False] , identifier[tag] , keyword[False] , keyword[None] )
identifier[tag_name] = identifier[tag] [ identifier[len] ( identifier[self] . identifier[tag_opener] ):- identifier[len] ( identifier[self] . identifier[tag_closer] )]. identifier[strip] ()
keyword[if] keyword[not] identifier[tag_name] :
keyword[return] ( keyword[False] , identifier[tag] , keyword[False] , keyword[None] )
identifier[closer] = keyword[False]
identifier[opts] ={}
keyword[if] identifier[tag_name] [ literal[int] ]== literal[string] :
identifier[tag_name] = identifier[tag_name] [ literal[int] :]
identifier[closer] = keyword[True]
keyword[if] (( literal[string] keyword[in] identifier[tag_name] ) keyword[or] ( literal[string] keyword[in] identifier[tag_name] )) keyword[and] keyword[not] identifier[closer] :
identifier[tag_name] , identifier[opts] = identifier[self] . identifier[_parse_opts] ( identifier[tag_name] )
keyword[return] ( keyword[True] , identifier[tag_name] . identifier[strip] (). identifier[lower] (), identifier[closer] , identifier[opts] ) | def _parse_tag(self, tag):
"""
Given a tag string (characters enclosed by []), this function will
parse any options and return a tuple of the form:
(valid, tag_name, closer, options)
"""
if not tag.startswith(self.tag_opener) or not tag.endswith(self.tag_closer) or '\n' in tag or ('\r' in tag):
return (False, tag, False, None) # depends on [control=['if'], data=[]]
tag_name = tag[len(self.tag_opener):-len(self.tag_closer)].strip()
if not tag_name:
return (False, tag, False, None) # depends on [control=['if'], data=[]]
closer = False
opts = {}
if tag_name[0] == '/':
tag_name = tag_name[1:]
closer = True # depends on [control=['if'], data=[]]
# Parse options inside the opening tag, if needed.
if ('=' in tag_name or ' ' in tag_name) and (not closer):
(tag_name, opts) = self._parse_opts(tag_name) # depends on [control=['if'], data=[]]
return (True, tag_name.strip().lower(), closer, opts) |
def get_name_from_abbrev(abbrev, case_sensitive=False):
"""
Given a country code abbreviation, get the full name from the table.
abbrev: (str) Country code to retrieve the full name of.
case_sensitive: (bool) When True, enforce case sensitivity.
"""
if case_sensitive:
country_code = abbrev
else:
country_code = abbrev.upper()
for code, full_name in COUNTRY_TUPLES:
if country_code == code:
return full_name
raise KeyError('No country with that country code.') | def function[get_name_from_abbrev, parameter[abbrev, case_sensitive]]:
constant[
Given a country code abbreviation, get the full name from the table.
abbrev: (str) Country code to retrieve the full name of.
case_sensitive: (bool) When True, enforce case sensitivity.
]
if name[case_sensitive] begin[:]
variable[country_code] assign[=] name[abbrev]
for taget[tuple[[<ast.Name object at 0x7da1b10e6410>, <ast.Name object at 0x7da1b10e4a00>]]] in starred[name[COUNTRY_TUPLES]] begin[:]
if compare[name[country_code] equal[==] name[code]] begin[:]
return[name[full_name]]
<ast.Raise object at 0x7da1b10e6b60> | keyword[def] identifier[get_name_from_abbrev] ( identifier[abbrev] , identifier[case_sensitive] = keyword[False] ):
literal[string]
keyword[if] identifier[case_sensitive] :
identifier[country_code] = identifier[abbrev]
keyword[else] :
identifier[country_code] = identifier[abbrev] . identifier[upper] ()
keyword[for] identifier[code] , identifier[full_name] keyword[in] identifier[COUNTRY_TUPLES] :
keyword[if] identifier[country_code] == identifier[code] :
keyword[return] identifier[full_name]
keyword[raise] identifier[KeyError] ( literal[string] ) | def get_name_from_abbrev(abbrev, case_sensitive=False):
"""
Given a country code abbreviation, get the full name from the table.
abbrev: (str) Country code to retrieve the full name of.
case_sensitive: (bool) When True, enforce case sensitivity.
"""
if case_sensitive:
country_code = abbrev # depends on [control=['if'], data=[]]
else:
country_code = abbrev.upper()
for (code, full_name) in COUNTRY_TUPLES:
if country_code == code:
return full_name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise KeyError('No country with that country code.') |
def lvresize(size=None, lvpath=None, extents=None):
'''
Return information about the logical volume(s)
CLI Examples:
.. code-block:: bash
salt '*' lvm.lvresize +12M /dev/mapper/vg1-test
salt '*' lvm.lvresize lvpath=/dev/mapper/vg1-test extents=+100%FREE
'''
if size and extents:
log.error('Error: Please specify only one of size or extents')
return {}
cmd = ['lvresize']
if size:
cmd.extend(['-L', '{0}'.format(size)])
elif extents:
cmd.extend(['-l', '{0}'.format(extents)])
else:
log.error('Error: Either size or extents must be specified')
return {}
cmd.append(lvpath)
cmd_ret = __salt__['cmd.run'](cmd, python_shell=False).splitlines()
return {'Output from lvresize': cmd_ret[0].strip()} | def function[lvresize, parameter[size, lvpath, extents]]:
constant[
Return information about the logical volume(s)
CLI Examples:
.. code-block:: bash
salt '*' lvm.lvresize +12M /dev/mapper/vg1-test
salt '*' lvm.lvresize lvpath=/dev/mapper/vg1-test extents=+100%FREE
]
if <ast.BoolOp object at 0x7da1b1fa6830> begin[:]
call[name[log].error, parameter[constant[Error: Please specify only one of size or extents]]]
return[dictionary[[], []]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b1fa6f20>]]
if name[size] begin[:]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da1b1fa6fe0>, <ast.Call object at 0x7da1b1fa7070>]]]]
call[name[cmd].append, parameter[name[lvpath]]]
variable[cmd_ret] assign[=] call[call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]].splitlines, parameter[]]
return[dictionary[[<ast.Constant object at 0x7da1b1fa6530>], [<ast.Call object at 0x7da1b1fa6560>]]] | keyword[def] identifier[lvresize] ( identifier[size] = keyword[None] , identifier[lvpath] = keyword[None] , identifier[extents] = keyword[None] ):
literal[string]
keyword[if] identifier[size] keyword[and] identifier[extents] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] {}
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[size] :
identifier[cmd] . identifier[extend] ([ literal[string] , literal[string] . identifier[format] ( identifier[size] )])
keyword[elif] identifier[extents] :
identifier[cmd] . identifier[extend] ([ literal[string] , literal[string] . identifier[format] ( identifier[extents] )])
keyword[else] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] {}
identifier[cmd] . identifier[append] ( identifier[lvpath] )
identifier[cmd_ret] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ). identifier[splitlines] ()
keyword[return] { literal[string] : identifier[cmd_ret] [ literal[int] ]. identifier[strip] ()} | def lvresize(size=None, lvpath=None, extents=None):
"""
Return information about the logical volume(s)
CLI Examples:
.. code-block:: bash
salt '*' lvm.lvresize +12M /dev/mapper/vg1-test
salt '*' lvm.lvresize lvpath=/dev/mapper/vg1-test extents=+100%FREE
"""
if size and extents:
log.error('Error: Please specify only one of size or extents')
return {} # depends on [control=['if'], data=[]]
cmd = ['lvresize']
if size:
cmd.extend(['-L', '{0}'.format(size)]) # depends on [control=['if'], data=[]]
elif extents:
cmd.extend(['-l', '{0}'.format(extents)]) # depends on [control=['if'], data=[]]
else:
log.error('Error: Either size or extents must be specified')
return {}
cmd.append(lvpath)
cmd_ret = __salt__['cmd.run'](cmd, python_shell=False).splitlines()
return {'Output from lvresize': cmd_ret[0].strip()} |
def filesByType(fileList):
"""
given a list of files, return them as a dict sorted by type:
* plot, tif, data, other
"""
features=["plot","tif","data","other","experiment"]
files={}
for feature in features:
files[feature]=[]
for fname in fileList:
other=True
for feature in features:
if "_"+feature+"_" in fname:
files[feature].extend([fname])
other=False
if other:
files['other'].extend([fname])
return files | def function[filesByType, parameter[fileList]]:
constant[
given a list of files, return them as a dict sorted by type:
* plot, tif, data, other
]
variable[features] assign[=] list[[<ast.Constant object at 0x7da1afe8efe0>, <ast.Constant object at 0x7da1afe07b80>, <ast.Constant object at 0x7da1afe078b0>, <ast.Constant object at 0x7da1afe069e0>, <ast.Constant object at 0x7da1afe04af0>]]
variable[files] assign[=] dictionary[[], []]
for taget[name[feature]] in starred[name[features]] begin[:]
call[name[files]][name[feature]] assign[=] list[[]]
for taget[name[fname]] in starred[name[fileList]] begin[:]
variable[other] assign[=] constant[True]
for taget[name[feature]] in starred[name[features]] begin[:]
if compare[binary_operation[binary_operation[constant[_] + name[feature]] + constant[_]] in name[fname]] begin[:]
call[call[name[files]][name[feature]].extend, parameter[list[[<ast.Name object at 0x7da1afe064d0>]]]]
variable[other] assign[=] constant[False]
if name[other] begin[:]
call[call[name[files]][constant[other]].extend, parameter[list[[<ast.Name object at 0x7da1afe04cd0>]]]]
return[name[files]] | keyword[def] identifier[filesByType] ( identifier[fileList] ):
literal[string]
identifier[features] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[files] ={}
keyword[for] identifier[feature] keyword[in] identifier[features] :
identifier[files] [ identifier[feature] ]=[]
keyword[for] identifier[fname] keyword[in] identifier[fileList] :
identifier[other] = keyword[True]
keyword[for] identifier[feature] keyword[in] identifier[features] :
keyword[if] literal[string] + identifier[feature] + literal[string] keyword[in] identifier[fname] :
identifier[files] [ identifier[feature] ]. identifier[extend] ([ identifier[fname] ])
identifier[other] = keyword[False]
keyword[if] identifier[other] :
identifier[files] [ literal[string] ]. identifier[extend] ([ identifier[fname] ])
keyword[return] identifier[files] | def filesByType(fileList):
"""
given a list of files, return them as a dict sorted by type:
* plot, tif, data, other
"""
features = ['plot', 'tif', 'data', 'other', 'experiment']
files = {}
for feature in features:
files[feature] = [] # depends on [control=['for'], data=['feature']]
for fname in fileList:
other = True
for feature in features:
if '_' + feature + '_' in fname:
files[feature].extend([fname])
other = False # depends on [control=['if'], data=['fname']] # depends on [control=['for'], data=['feature']]
if other:
files['other'].extend([fname]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fname']]
return files |
def main(symbol_yml_file, raw_pickle_file, pickle_dest_path):
"""
Parameters
----------
symbol_yml_file : str
Path to a YAML file which contains recordings.
raw_pickle_file : str
Path to a pickle file which contains raw recordings.
pickle_dest_path : str
Path where the filtered dict gets serialized as a pickle file again.
"""
metadata = get_metadata()
symbol_ids = get_symbol_ids(symbol_yml_file, metadata)
symbol_ids = transform_sids(symbol_ids)
raw = load_raw(raw_pickle_file)
filter_and_save(raw, symbol_ids, pickle_dest_path) | def function[main, parameter[symbol_yml_file, raw_pickle_file, pickle_dest_path]]:
constant[
Parameters
----------
symbol_yml_file : str
Path to a YAML file which contains recordings.
raw_pickle_file : str
Path to a pickle file which contains raw recordings.
pickle_dest_path : str
Path where the filtered dict gets serialized as a pickle file again.
]
variable[metadata] assign[=] call[name[get_metadata], parameter[]]
variable[symbol_ids] assign[=] call[name[get_symbol_ids], parameter[name[symbol_yml_file], name[metadata]]]
variable[symbol_ids] assign[=] call[name[transform_sids], parameter[name[symbol_ids]]]
variable[raw] assign[=] call[name[load_raw], parameter[name[raw_pickle_file]]]
call[name[filter_and_save], parameter[name[raw], name[symbol_ids], name[pickle_dest_path]]] | keyword[def] identifier[main] ( identifier[symbol_yml_file] , identifier[raw_pickle_file] , identifier[pickle_dest_path] ):
literal[string]
identifier[metadata] = identifier[get_metadata] ()
identifier[symbol_ids] = identifier[get_symbol_ids] ( identifier[symbol_yml_file] , identifier[metadata] )
identifier[symbol_ids] = identifier[transform_sids] ( identifier[symbol_ids] )
identifier[raw] = identifier[load_raw] ( identifier[raw_pickle_file] )
identifier[filter_and_save] ( identifier[raw] , identifier[symbol_ids] , identifier[pickle_dest_path] ) | def main(symbol_yml_file, raw_pickle_file, pickle_dest_path):
"""
Parameters
----------
symbol_yml_file : str
Path to a YAML file which contains recordings.
raw_pickle_file : str
Path to a pickle file which contains raw recordings.
pickle_dest_path : str
Path where the filtered dict gets serialized as a pickle file again.
"""
metadata = get_metadata()
symbol_ids = get_symbol_ids(symbol_yml_file, metadata)
symbol_ids = transform_sids(symbol_ids)
raw = load_raw(raw_pickle_file)
filter_and_save(raw, symbol_ids, pickle_dest_path) |
def horizon_dashboard_nav(context):
"""Generates sub-navigation entries for the current dashboard."""
if 'request' not in context:
return {}
dashboard = context['request'].horizon['dashboard']
panel_groups = dashboard.get_panel_groups()
non_empty_groups = []
for group in panel_groups.values():
allowed_panels = []
for panel in group:
if (callable(panel.nav) and panel.nav(context) and
panel.can_access(context)):
allowed_panels.append(panel)
elif (not callable(panel.nav) and panel.nav and
panel.can_access(context)):
allowed_panels.append(panel)
if allowed_panels:
if group.name is None:
non_empty_groups.append((dashboard.name, allowed_panels))
else:
non_empty_groups.append((group.name, allowed_panels))
return {'components': OrderedDict(non_empty_groups),
'user': context['request'].user,
'current': context['request'].horizon['panel'].slug,
'request': context['request']} | def function[horizon_dashboard_nav, parameter[context]]:
constant[Generates sub-navigation entries for the current dashboard.]
if compare[constant[request] <ast.NotIn object at 0x7da2590d7190> name[context]] begin[:]
return[dictionary[[], []]]
variable[dashboard] assign[=] call[call[name[context]][constant[request]].horizon][constant[dashboard]]
variable[panel_groups] assign[=] call[name[dashboard].get_panel_groups, parameter[]]
variable[non_empty_groups] assign[=] list[[]]
for taget[name[group]] in starred[call[name[panel_groups].values, parameter[]]] begin[:]
variable[allowed_panels] assign[=] list[[]]
for taget[name[panel]] in starred[name[group]] begin[:]
if <ast.BoolOp object at 0x7da1b1914820> begin[:]
call[name[allowed_panels].append, parameter[name[panel]]]
if name[allowed_panels] begin[:]
if compare[name[group].name is constant[None]] begin[:]
call[name[non_empty_groups].append, parameter[tuple[[<ast.Attribute object at 0x7da1b1915e10>, <ast.Name object at 0x7da1b1915d80>]]]]
return[dictionary[[<ast.Constant object at 0x7da1b1917910>, <ast.Constant object at 0x7da1b1917ac0>, <ast.Constant object at 0x7da1b1914c40>, <ast.Constant object at 0x7da1b1917dc0>], [<ast.Call object at 0x7da1b1917c10>, <ast.Attribute object at 0x7da1b184ebc0>, <ast.Attribute object at 0x7da1b184d0c0>, <ast.Subscript object at 0x7da1b184caf0>]]] | keyword[def] identifier[horizon_dashboard_nav] ( identifier[context] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[context] :
keyword[return] {}
identifier[dashboard] = identifier[context] [ literal[string] ]. identifier[horizon] [ literal[string] ]
identifier[panel_groups] = identifier[dashboard] . identifier[get_panel_groups] ()
identifier[non_empty_groups] =[]
keyword[for] identifier[group] keyword[in] identifier[panel_groups] . identifier[values] ():
identifier[allowed_panels] =[]
keyword[for] identifier[panel] keyword[in] identifier[group] :
keyword[if] ( identifier[callable] ( identifier[panel] . identifier[nav] ) keyword[and] identifier[panel] . identifier[nav] ( identifier[context] ) keyword[and]
identifier[panel] . identifier[can_access] ( identifier[context] )):
identifier[allowed_panels] . identifier[append] ( identifier[panel] )
keyword[elif] ( keyword[not] identifier[callable] ( identifier[panel] . identifier[nav] ) keyword[and] identifier[panel] . identifier[nav] keyword[and]
identifier[panel] . identifier[can_access] ( identifier[context] )):
identifier[allowed_panels] . identifier[append] ( identifier[panel] )
keyword[if] identifier[allowed_panels] :
keyword[if] identifier[group] . identifier[name] keyword[is] keyword[None] :
identifier[non_empty_groups] . identifier[append] (( identifier[dashboard] . identifier[name] , identifier[allowed_panels] ))
keyword[else] :
identifier[non_empty_groups] . identifier[append] (( identifier[group] . identifier[name] , identifier[allowed_panels] ))
keyword[return] { literal[string] : identifier[OrderedDict] ( identifier[non_empty_groups] ),
literal[string] : identifier[context] [ literal[string] ]. identifier[user] ,
literal[string] : identifier[context] [ literal[string] ]. identifier[horizon] [ literal[string] ]. identifier[slug] ,
literal[string] : identifier[context] [ literal[string] ]} | def horizon_dashboard_nav(context):
"""Generates sub-navigation entries for the current dashboard."""
if 'request' not in context:
return {} # depends on [control=['if'], data=[]]
dashboard = context['request'].horizon['dashboard']
panel_groups = dashboard.get_panel_groups()
non_empty_groups = []
for group in panel_groups.values():
allowed_panels = []
for panel in group:
if callable(panel.nav) and panel.nav(context) and panel.can_access(context):
allowed_panels.append(panel) # depends on [control=['if'], data=[]]
elif not callable(panel.nav) and panel.nav and panel.can_access(context):
allowed_panels.append(panel) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['panel']]
if allowed_panels:
if group.name is None:
non_empty_groups.append((dashboard.name, allowed_panels)) # depends on [control=['if'], data=[]]
else:
non_empty_groups.append((group.name, allowed_panels)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']]
return {'components': OrderedDict(non_empty_groups), 'user': context['request'].user, 'current': context['request'].horizon['panel'].slug, 'request': context['request']} |
def format_vk(vk):
"""Format vk before using it"""
# Force extension require to be a list
for ext in get_extensions_filtered(vk):
req = ext['require']
if not isinstance(req, list):
ext['require'] = [req] | def function[format_vk, parameter[vk]]:
constant[Format vk before using it]
for taget[name[ext]] in starred[call[name[get_extensions_filtered], parameter[name[vk]]]] begin[:]
variable[req] assign[=] call[name[ext]][constant[require]]
if <ast.UnaryOp object at 0x7da1b0792740> begin[:]
call[name[ext]][constant[require]] assign[=] list[[<ast.Name object at 0x7da1b0790940>]] | keyword[def] identifier[format_vk] ( identifier[vk] ):
literal[string]
keyword[for] identifier[ext] keyword[in] identifier[get_extensions_filtered] ( identifier[vk] ):
identifier[req] = identifier[ext] [ literal[string] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[req] , identifier[list] ):
identifier[ext] [ literal[string] ]=[ identifier[req] ] | def format_vk(vk):
"""Format vk before using it"""
# Force extension require to be a list
for ext in get_extensions_filtered(vk):
req = ext['require']
if not isinstance(req, list):
ext['require'] = [req] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ext']] |
def get_positions(self, copy=True):
'''
Get a dictionary of measurement positions.
'''
if copy:
return self._positions.copy()
else:
return self._positions | def function[get_positions, parameter[self, copy]]:
constant[
Get a dictionary of measurement positions.
]
if name[copy] begin[:]
return[call[name[self]._positions.copy, parameter[]]] | keyword[def] identifier[get_positions] ( identifier[self] , identifier[copy] = keyword[True] ):
literal[string]
keyword[if] identifier[copy] :
keyword[return] identifier[self] . identifier[_positions] . identifier[copy] ()
keyword[else] :
keyword[return] identifier[self] . identifier[_positions] | def get_positions(self, copy=True):
"""
Get a dictionary of measurement positions.
"""
if copy:
return self._positions.copy() # depends on [control=['if'], data=[]]
else:
return self._positions |
def _set_if_role(self, v, load=False):
"""
Setter method for if_role, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_mst_detail/output/cist/port/if_role (stp-port-role)
If this variable is read-only (config: false) in the
source YANG file, then _set_if_role is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_if_role() directly.
YANG Description: Interface role
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'backup': {'value': 5}, u'alternate': {'value': 4}, u'designated': {'value': 2}, u'disabled': {'value': 6}, u'master': {'value': 7}, u'error': {'value': 1}, u'root': {'value': 3}},), is_leaf=True, yang_name="if-role", rest_name="if-role", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='stp-port-role', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """if_role must be of a type compatible with stp-port-role""",
'defined-type': "brocade-xstp-ext:stp-port-role",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'backup': {'value': 5}, u'alternate': {'value': 4}, u'designated': {'value': 2}, u'disabled': {'value': 6}, u'master': {'value': 7}, u'error': {'value': 1}, u'root': {'value': 3}},), is_leaf=True, yang_name="if-role", rest_name="if-role", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='stp-port-role', is_config=True)""",
})
self.__if_role = t
if hasattr(self, '_set'):
self._set() | def function[_set_if_role, parameter[self, v, load]]:
constant[
Setter method for if_role, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_mst_detail/output/cist/port/if_role (stp-port-role)
If this variable is read-only (config: false) in the
source YANG file, then _set_if_role is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_if_role() directly.
YANG Description: Interface role
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f00eec0>
name[self].__if_role assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_if_role] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__if_role] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_if_role(self, v, load=False):
"""
Setter method for if_role, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_mst_detail/output/cist/port/if_role (stp-port-role)
If this variable is read-only (config: false) in the
source YANG file, then _set_if_role is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_if_role() directly.
YANG Description: Interface role
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'backup': {'value': 5}, u'alternate': {'value': 4}, u'designated': {'value': 2}, u'disabled': {'value': 6}, u'master': {'value': 7}, u'error': {'value': 1}, u'root': {'value': 3}}), is_leaf=True, yang_name='if-role', rest_name='if-role', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='stp-port-role', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'if_role must be of a type compatible with stp-port-role', 'defined-type': 'brocade-xstp-ext:stp-port-role', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'backup\': {\'value\': 5}, u\'alternate\': {\'value\': 4}, u\'designated\': {\'value\': 2}, u\'disabled\': {\'value\': 6}, u\'master\': {\'value\': 7}, u\'error\': {\'value\': 1}, u\'root\': {\'value\': 3}},), is_leaf=True, yang_name="if-role", rest_name="if-role", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace=\'urn:brocade.com:mgmt:brocade-xstp-ext\', defining_module=\'brocade-xstp-ext\', yang_type=\'stp-port-role\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__if_role = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def Pepper(p=0, per_channel=False, name=None, deterministic=False, random_state=None):
"""
Adds pepper noise to an image, i.e. black-ish pixels.
This is similar to dropout, but slower and the black pixels are not uniformly black.
dtype support::
See ``imgaug.augmenters.arithmetic.ReplaceElementwise``.
Parameters
----------
p : float or tuple of float or list of float or imgaug.parameters.StochasticParameter, optional
Probability of changing a pixel to pepper noise.
* If a float, then that value will be used for all images as the
probability.
* If a tuple ``(a, b)``, then a probability will be sampled per image
from the range ``a <= x <= b``.
* If a list, then a random value will be sampled from that list
per image.
* If a StochasticParameter, then this parameter will be used as
the *mask*, i.e. it is expected to contain values between
0.0 and 1.0, where 1.0 means that pepper is to be added
at that location.
per_channel : bool or float, optional
Whether to use the same value for all channels (False)
or to sample a new value for each channel (True).
If this value is a float ``p``, then for ``p`` percent of all images
`per_channel` will be treated as True, otherwise as False.
name : None or str, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
deterministic : bool, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
random_state : None or int or numpy.random.RandomState, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
Examples
--------
>>> aug = iaa.Pepper(0.05)
Replaces 5 percent of all pixels with pepper.
"""
replacement01 = iap.ForceSign(
iap.Beta(0.5, 0.5) - 0.5,
positive=False,
mode="invert"
) + 0.5
replacement = replacement01 * 255
if name is None:
name = "Unnamed%s" % (ia.caller_name(),)
return ReplaceElementwise(
mask=p,
replacement=replacement,
per_channel=per_channel,
name=name,
deterministic=deterministic,
random_state=random_state
) | def function[Pepper, parameter[p, per_channel, name, deterministic, random_state]]:
constant[
Adds pepper noise to an image, i.e. black-ish pixels.
This is similar to dropout, but slower and the black pixels are not uniformly black.
dtype support::
See ``imgaug.augmenters.arithmetic.ReplaceElementwise``.
Parameters
----------
p : float or tuple of float or list of float or imgaug.parameters.StochasticParameter, optional
Probability of changing a pixel to pepper noise.
* If a float, then that value will be used for all images as the
probability.
* If a tuple ``(a, b)``, then a probability will be sampled per image
from the range ``a <= x <= b``.
* If a list, then a random value will be sampled from that list
per image.
* If a StochasticParameter, then this parameter will be used as
the *mask*, i.e. it is expected to contain values between
0.0 and 1.0, where 1.0 means that pepper is to be added
at that location.
per_channel : bool or float, optional
Whether to use the same value for all channels (False)
or to sample a new value for each channel (True).
If this value is a float ``p``, then for ``p`` percent of all images
`per_channel` will be treated as True, otherwise as False.
name : None or str, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
deterministic : bool, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
random_state : None or int or numpy.random.RandomState, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
Examples
--------
>>> aug = iaa.Pepper(0.05)
Replaces 5 percent of all pixels with pepper.
]
variable[replacement01] assign[=] binary_operation[call[name[iap].ForceSign, parameter[binary_operation[call[name[iap].Beta, parameter[constant[0.5], constant[0.5]]] - constant[0.5]]]] + constant[0.5]]
variable[replacement] assign[=] binary_operation[name[replacement01] * constant[255]]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] binary_operation[constant[Unnamed%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f8101f0>]]]
return[call[name[ReplaceElementwise], parameter[]]] | keyword[def] identifier[Pepper] ( identifier[p] = literal[int] , identifier[per_channel] = keyword[False] , identifier[name] = keyword[None] , identifier[deterministic] = keyword[False] , identifier[random_state] = keyword[None] ):
literal[string]
identifier[replacement01] = identifier[iap] . identifier[ForceSign] (
identifier[iap] . identifier[Beta] ( literal[int] , literal[int] )- literal[int] ,
identifier[positive] = keyword[False] ,
identifier[mode] = literal[string]
)+ literal[int]
identifier[replacement] = identifier[replacement01] * literal[int]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = literal[string] %( identifier[ia] . identifier[caller_name] (),)
keyword[return] identifier[ReplaceElementwise] (
identifier[mask] = identifier[p] ,
identifier[replacement] = identifier[replacement] ,
identifier[per_channel] = identifier[per_channel] ,
identifier[name] = identifier[name] ,
identifier[deterministic] = identifier[deterministic] ,
identifier[random_state] = identifier[random_state]
) | def Pepper(p=0, per_channel=False, name=None, deterministic=False, random_state=None):
"""
Adds pepper noise to an image, i.e. black-ish pixels.
This is similar to dropout, but slower and the black pixels are not uniformly black.
dtype support::
See ``imgaug.augmenters.arithmetic.ReplaceElementwise``.
Parameters
----------
p : float or tuple of float or list of float or imgaug.parameters.StochasticParameter, optional
Probability of changing a pixel to pepper noise.
* If a float, then that value will be used for all images as the
probability.
* If a tuple ``(a, b)``, then a probability will be sampled per image
from the range ``a <= x <= b``.
* If a list, then a random value will be sampled from that list
per image.
* If a StochasticParameter, then this parameter will be used as
the *mask*, i.e. it is expected to contain values between
0.0 and 1.0, where 1.0 means that pepper is to be added
at that location.
per_channel : bool or float, optional
Whether to use the same value for all channels (False)
or to sample a new value for each channel (True).
If this value is a float ``p``, then for ``p`` percent of all images
`per_channel` will be treated as True, otherwise as False.
name : None or str, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
deterministic : bool, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
random_state : None or int or numpy.random.RandomState, optional
See :func:`imgaug.augmenters.meta.Augmenter.__init__`.
Examples
--------
>>> aug = iaa.Pepper(0.05)
Replaces 5 percent of all pixels with pepper.
"""
replacement01 = iap.ForceSign(iap.Beta(0.5, 0.5) - 0.5, positive=False, mode='invert') + 0.5
replacement = replacement01 * 255
if name is None:
name = 'Unnamed%s' % (ia.caller_name(),) # depends on [control=['if'], data=['name']]
return ReplaceElementwise(mask=p, replacement=replacement, per_channel=per_channel, name=name, deterministic=deterministic, random_state=random_state) |
def fetchone(self, query, *args):
"""
Returns the first result of the given query.
:param query: The query to be executed as a `str`.
:param params: A `tuple` of parameters that will be replaced for
placeholders in the query.
:return: The retrieved row with each field being one element in a
`tuple`.
"""
cursor = self.connection.cursor()
try:
cursor.execute(query, args)
return cursor.fetchone()
finally:
cursor.close() | def function[fetchone, parameter[self, query]]:
constant[
Returns the first result of the given query.
:param query: The query to be executed as a `str`.
:param params: A `tuple` of parameters that will be replaced for
placeholders in the query.
:return: The retrieved row with each field being one element in a
`tuple`.
]
variable[cursor] assign[=] call[name[self].connection.cursor, parameter[]]
<ast.Try object at 0x7da1b0f1dd50> | keyword[def] identifier[fetchone] ( identifier[self] , identifier[query] ,* identifier[args] ):
literal[string]
identifier[cursor] = identifier[self] . identifier[connection] . identifier[cursor] ()
keyword[try] :
identifier[cursor] . identifier[execute] ( identifier[query] , identifier[args] )
keyword[return] identifier[cursor] . identifier[fetchone] ()
keyword[finally] :
identifier[cursor] . identifier[close] () | def fetchone(self, query, *args):
"""
Returns the first result of the given query.
:param query: The query to be executed as a `str`.
:param params: A `tuple` of parameters that will be replaced for
placeholders in the query.
:return: The retrieved row with each field being one element in a
`tuple`.
"""
cursor = self.connection.cursor()
try:
cursor.execute(query, args)
return cursor.fetchone() # depends on [control=['try'], data=[]]
finally:
cursor.close() |
def persist(self, **kwargs):
""" Trigger computation in constituent dask arrays
This keeps them as dask arrays but encourages them to keep data in
memory. This is particularly useful when on a distributed machine.
When on a single machine consider using ``.compute()`` instead.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.
See Also
--------
dask.persist
"""
ds = self._to_temp_dataset().persist(**kwargs)
return self._from_temp_dataset(ds) | def function[persist, parameter[self]]:
constant[ Trigger computation in constituent dask arrays
This keeps them as dask arrays but encourages them to keep data in
memory. This is particularly useful when on a distributed machine.
When on a single machine consider using ``.compute()`` instead.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.
See Also
--------
dask.persist
]
variable[ds] assign[=] call[call[name[self]._to_temp_dataset, parameter[]].persist, parameter[]]
return[call[name[self]._from_temp_dataset, parameter[name[ds]]]] | keyword[def] identifier[persist] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[ds] = identifier[self] . identifier[_to_temp_dataset] (). identifier[persist] (** identifier[kwargs] )
keyword[return] identifier[self] . identifier[_from_temp_dataset] ( identifier[ds] ) | def persist(self, **kwargs):
""" Trigger computation in constituent dask arrays
This keeps them as dask arrays but encourages them to keep data in
memory. This is particularly useful when on a distributed machine.
When on a single machine consider using ``.compute()`` instead.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.
See Also
--------
dask.persist
"""
ds = self._to_temp_dataset().persist(**kwargs)
return self._from_temp_dataset(ds) |
def get_cachedir_csig(self):
"""
Fetch a Node's content signature for purposes of computing
another Node's cachesig.
This is a wrapper around the normal get_csig() method that handles
the somewhat obscure case of using CacheDir with the -n option.
Any files that don't exist would normally be "built" by fetching
them from the cache, but the normal get_csig() method will try
to open up the local file, which doesn't exist because the -n
option meant we didn't actually pull the file from cachedir.
But since the file *does* actually exist in the cachedir, we
can use its contents for the csig.
"""
try:
return self.cachedir_csig
except AttributeError:
pass
cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
if not self.exists() and cachefile and os.path.exists(cachefile):
self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
SCons.Node.FS.File.md5_chunksize * 1024)
else:
self.cachedir_csig = self.get_csig()
return self.cachedir_csig | def function[get_cachedir_csig, parameter[self]]:
constant[
Fetch a Node's content signature for purposes of computing
another Node's cachesig.
This is a wrapper around the normal get_csig() method that handles
the somewhat obscure case of using CacheDir with the -n option.
Any files that don't exist would normally be "built" by fetching
them from the cache, but the normal get_csig() method will try
to open up the local file, which doesn't exist because the -n
option meant we didn't actually pull the file from cachedir.
But since the file *does* actually exist in the cachedir, we
can use its contents for the csig.
]
<ast.Try object at 0x7da18fe93790>
<ast.Tuple object at 0x7da18fe933a0> assign[=] call[call[call[name[self].get_build_env, parameter[]].get_CacheDir, parameter[]].cachepath, parameter[name[self]]]
if <ast.BoolOp object at 0x7da204347370> begin[:]
name[self].cachedir_csig assign[=] call[name[SCons].Util.MD5filesignature, parameter[name[cachefile], binary_operation[name[SCons].Node.FS.File.md5_chunksize * constant[1024]]]]
return[name[self].cachedir_csig] | keyword[def] identifier[get_cachedir_csig] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[cachedir_csig]
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[cachedir] , identifier[cachefile] = identifier[self] . identifier[get_build_env] (). identifier[get_CacheDir] (). identifier[cachepath] ( identifier[self] )
keyword[if] keyword[not] identifier[self] . identifier[exists] () keyword[and] identifier[cachefile] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[cachefile] ):
identifier[self] . identifier[cachedir_csig] = identifier[SCons] . identifier[Util] . identifier[MD5filesignature] ( identifier[cachefile] , identifier[SCons] . identifier[Node] . identifier[FS] . identifier[File] . identifier[md5_chunksize] * literal[int] )
keyword[else] :
identifier[self] . identifier[cachedir_csig] = identifier[self] . identifier[get_csig] ()
keyword[return] identifier[self] . identifier[cachedir_csig] | def get_cachedir_csig(self):
"""
Fetch a Node's content signature for purposes of computing
another Node's cachesig.
This is a wrapper around the normal get_csig() method that handles
the somewhat obscure case of using CacheDir with the -n option.
Any files that don't exist would normally be "built" by fetching
them from the cache, but the normal get_csig() method will try
to open up the local file, which doesn't exist because the -n
option meant we didn't actually pull the file from cachedir.
But since the file *does* actually exist in the cachedir, we
can use its contents for the csig.
"""
try:
return self.cachedir_csig # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
(cachedir, cachefile) = self.get_build_env().get_CacheDir().cachepath(self)
if not self.exists() and cachefile and os.path.exists(cachefile):
self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, SCons.Node.FS.File.md5_chunksize * 1024) # depends on [control=['if'], data=[]]
else:
self.cachedir_csig = self.get_csig()
return self.cachedir_csig |
def list_resourcepools(kwargs=None, call=None):
'''
List all the resource pools for this VMware environment
CLI Example:
.. code-block:: bash
salt-cloud -f list_resourcepools my-vmware-config
'''
if call != 'function':
raise SaltCloudSystemExit(
'The list_resourcepools function must be called with '
'-f or --function.'
)
return {'Resource Pools': salt.utils.vmware.list_resourcepools(_get_si())} | def function[list_resourcepools, parameter[kwargs, call]]:
constant[
List all the resource pools for this VMware environment
CLI Example:
.. code-block:: bash
salt-cloud -f list_resourcepools my-vmware-config
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da18f813e50>
return[dictionary[[<ast.Constant object at 0x7da18f813bb0>], [<ast.Call object at 0x7da18f812dd0>]]] | keyword[def] identifier[list_resourcepools] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
keyword[return] { literal[string] : identifier[salt] . identifier[utils] . identifier[vmware] . identifier[list_resourcepools] ( identifier[_get_si] ())} | def list_resourcepools(kwargs=None, call=None):
"""
List all the resource pools for this VMware environment
CLI Example:
.. code-block:: bash
salt-cloud -f list_resourcepools my-vmware-config
"""
if call != 'function':
raise SaltCloudSystemExit('The list_resourcepools function must be called with -f or --function.') # depends on [control=['if'], data=[]]
return {'Resource Pools': salt.utils.vmware.list_resourcepools(_get_si())} |
def save_patches(self, out_dir):
"""Save the current state of this repository to one or more patches.
Makes one patch against HEAD and another one against the most recent
commit that occurs in an upstream branch. This way we can be robust
to history editing as long as the user never does "push -f" to break
history on an upstream branch.
Writes the first patch to <out_dir>/diff.patch and the second to
<out_dir>/upstream_diff_<commit_id>.patch.
Args:
out_dir (str): Directory to write the patch files.
"""
if not self.git.enabled:
return False
try:
root = self.git.root
if self.git.dirty:
patch_path = os.path.join(out_dir, 'diff.patch')
if self.git.has_submodule_diff:
with open(patch_path, 'wb') as patch:
# we diff against HEAD to ensure we get changes in the index
subprocess.check_call(
['git', 'diff', '--submodule=diff', 'HEAD'], stdout=patch, cwd=root, timeout=5)
else:
with open(patch_path, 'wb') as patch:
subprocess.check_call(
['git', 'diff', 'HEAD'], stdout=patch, cwd=root, timeout=5)
upstream_commit = self.git.get_upstream_fork_point()
if upstream_commit and upstream_commit != self.git.repo.head.commit:
sha = upstream_commit.hexsha
upstream_patch_path = os.path.join(
out_dir, 'upstream_diff_{}.patch'.format(sha))
if self.git.has_submodule_diff:
with open(upstream_patch_path, 'wb') as upstream_patch:
subprocess.check_call(
['git', 'diff', '--submodule=diff', sha], stdout=upstream_patch, cwd=root, timeout=5)
else:
with open(upstream_patch_path, 'wb') as upstream_patch:
subprocess.check_call(
['git', 'diff', sha], stdout=upstream_patch, cwd=root, timeout=5)
except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
logger.error('Error generating diff') | def function[save_patches, parameter[self, out_dir]]:
constant[Save the current state of this repository to one or more patches.
Makes one patch against HEAD and another one against the most recent
commit that occurs in an upstream branch. This way we can be robust
to history editing as long as the user never does "push -f" to break
history on an upstream branch.
Writes the first patch to <out_dir>/diff.patch and the second to
<out_dir>/upstream_diff_<commit_id>.patch.
Args:
out_dir (str): Directory to write the patch files.
]
if <ast.UnaryOp object at 0x7da2045671c0> begin[:]
return[constant[False]]
<ast.Try object at 0x7da204565e10> | keyword[def] identifier[save_patches] ( identifier[self] , identifier[out_dir] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[git] . identifier[enabled] :
keyword[return] keyword[False]
keyword[try] :
identifier[root] = identifier[self] . identifier[git] . identifier[root]
keyword[if] identifier[self] . identifier[git] . identifier[dirty] :
identifier[patch_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] )
keyword[if] identifier[self] . identifier[git] . identifier[has_submodule_diff] :
keyword[with] identifier[open] ( identifier[patch_path] , literal[string] ) keyword[as] identifier[patch] :
identifier[subprocess] . identifier[check_call] (
[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[stdout] = identifier[patch] , identifier[cwd] = identifier[root] , identifier[timeout] = literal[int] )
keyword[else] :
keyword[with] identifier[open] ( identifier[patch_path] , literal[string] ) keyword[as] identifier[patch] :
identifier[subprocess] . identifier[check_call] (
[ literal[string] , literal[string] , literal[string] ], identifier[stdout] = identifier[patch] , identifier[cwd] = identifier[root] , identifier[timeout] = literal[int] )
identifier[upstream_commit] = identifier[self] . identifier[git] . identifier[get_upstream_fork_point] ()
keyword[if] identifier[upstream_commit] keyword[and] identifier[upstream_commit] != identifier[self] . identifier[git] . identifier[repo] . identifier[head] . identifier[commit] :
identifier[sha] = identifier[upstream_commit] . identifier[hexsha]
identifier[upstream_patch_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[out_dir] , literal[string] . identifier[format] ( identifier[sha] ))
keyword[if] identifier[self] . identifier[git] . identifier[has_submodule_diff] :
keyword[with] identifier[open] ( identifier[upstream_patch_path] , literal[string] ) keyword[as] identifier[upstream_patch] :
identifier[subprocess] . identifier[check_call] (
[ literal[string] , literal[string] , literal[string] , identifier[sha] ], identifier[stdout] = identifier[upstream_patch] , identifier[cwd] = identifier[root] , identifier[timeout] = literal[int] )
keyword[else] :
keyword[with] identifier[open] ( identifier[upstream_patch_path] , literal[string] ) keyword[as] identifier[upstream_patch] :
identifier[subprocess] . identifier[check_call] (
[ literal[string] , literal[string] , identifier[sha] ], identifier[stdout] = identifier[upstream_patch] , identifier[cwd] = identifier[root] , identifier[timeout] = literal[int] )
keyword[except] ( identifier[subprocess] . identifier[CalledProcessError] , identifier[subprocess] . identifier[TimeoutExpired] ):
identifier[logger] . identifier[error] ( literal[string] ) | def save_patches(self, out_dir):
"""Save the current state of this repository to one or more patches.
Makes one patch against HEAD and another one against the most recent
commit that occurs in an upstream branch. This way we can be robust
to history editing as long as the user never does "push -f" to break
history on an upstream branch.
Writes the first patch to <out_dir>/diff.patch and the second to
<out_dir>/upstream_diff_<commit_id>.patch.
Args:
out_dir (str): Directory to write the patch files.
"""
if not self.git.enabled:
return False # depends on [control=['if'], data=[]]
try:
root = self.git.root
if self.git.dirty:
patch_path = os.path.join(out_dir, 'diff.patch')
if self.git.has_submodule_diff:
with open(patch_path, 'wb') as patch:
# we diff against HEAD to ensure we get changes in the index
subprocess.check_call(['git', 'diff', '--submodule=diff', 'HEAD'], stdout=patch, cwd=root, timeout=5) # depends on [control=['with'], data=['patch']] # depends on [control=['if'], data=[]]
else:
with open(patch_path, 'wb') as patch:
subprocess.check_call(['git', 'diff', 'HEAD'], stdout=patch, cwd=root, timeout=5) # depends on [control=['with'], data=['patch']] # depends on [control=['if'], data=[]]
upstream_commit = self.git.get_upstream_fork_point()
if upstream_commit and upstream_commit != self.git.repo.head.commit:
sha = upstream_commit.hexsha
upstream_patch_path = os.path.join(out_dir, 'upstream_diff_{}.patch'.format(sha))
if self.git.has_submodule_diff:
with open(upstream_patch_path, 'wb') as upstream_patch:
subprocess.check_call(['git', 'diff', '--submodule=diff', sha], stdout=upstream_patch, cwd=root, timeout=5) # depends on [control=['with'], data=['upstream_patch']] # depends on [control=['if'], data=[]]
else:
with open(upstream_patch_path, 'wb') as upstream_patch:
subprocess.check_call(['git', 'diff', sha], stdout=upstream_patch, cwd=root, timeout=5) # depends on [control=['with'], data=['upstream_patch']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
logger.error('Error generating diff') # depends on [control=['except'], data=[]] |
def _stringify_path(filepath_or_buffer):
"""Attempt to convert a path-like object to a string.
Parameters
----------
filepath_or_buffer : object to be converted
Returns
-------
str_filepath_or_buffer : maybe a string version of the object
Notes
-----
Objects supporting the fspath protocol (python 3.6+) are coerced
according to its __fspath__ method.
For backwards compatibility with older pythons, pathlib.Path and
py.path objects are specially coerced.
Any other object is passed through unchanged, which includes bytes,
strings, buffers, or anything else that's not even path-like.
"""
try:
import pathlib
_PATHLIB_INSTALLED = True
except ImportError:
_PATHLIB_INSTALLED = False
try:
from py.path import local as LocalPath
_PY_PATH_INSTALLED = True
except ImportError:
_PY_PATH_INSTALLED = False
if hasattr(filepath_or_buffer, '__fspath__'):
return filepath_or_buffer.__fspath__()
if _PATHLIB_INSTALLED and isinstance(filepath_or_buffer, pathlib.Path):
return str(filepath_or_buffer)
if _PY_PATH_INSTALLED and isinstance(filepath_or_buffer, LocalPath):
return filepath_or_buffer.strpath
return _expand_user(filepath_or_buffer) | def function[_stringify_path, parameter[filepath_or_buffer]]:
constant[Attempt to convert a path-like object to a string.
Parameters
----------
filepath_or_buffer : object to be converted
Returns
-------
str_filepath_or_buffer : maybe a string version of the object
Notes
-----
Objects supporting the fspath protocol (python 3.6+) are coerced
according to its __fspath__ method.
For backwards compatibility with older pythons, pathlib.Path and
py.path objects are specially coerced.
Any other object is passed through unchanged, which includes bytes,
strings, buffers, or anything else that's not even path-like.
]
<ast.Try object at 0x7da1b1dfac80>
<ast.Try object at 0x7da1b1dfa110>
if call[name[hasattr], parameter[name[filepath_or_buffer], constant[__fspath__]]] begin[:]
return[call[name[filepath_or_buffer].__fspath__, parameter[]]]
if <ast.BoolOp object at 0x7da1b1dfa470> begin[:]
return[call[name[str], parameter[name[filepath_or_buffer]]]]
if <ast.BoolOp object at 0x7da1b1df9f90> begin[:]
return[name[filepath_or_buffer].strpath]
return[call[name[_expand_user], parameter[name[filepath_or_buffer]]]] | keyword[def] identifier[_stringify_path] ( identifier[filepath_or_buffer] ):
literal[string]
keyword[try] :
keyword[import] identifier[pathlib]
identifier[_PATHLIB_INSTALLED] = keyword[True]
keyword[except] identifier[ImportError] :
identifier[_PATHLIB_INSTALLED] = keyword[False]
keyword[try] :
keyword[from] identifier[py] . identifier[path] keyword[import] identifier[local] keyword[as] identifier[LocalPath]
identifier[_PY_PATH_INSTALLED] = keyword[True]
keyword[except] identifier[ImportError] :
identifier[_PY_PATH_INSTALLED] = keyword[False]
keyword[if] identifier[hasattr] ( identifier[filepath_or_buffer] , literal[string] ):
keyword[return] identifier[filepath_or_buffer] . identifier[__fspath__] ()
keyword[if] identifier[_PATHLIB_INSTALLED] keyword[and] identifier[isinstance] ( identifier[filepath_or_buffer] , identifier[pathlib] . identifier[Path] ):
keyword[return] identifier[str] ( identifier[filepath_or_buffer] )
keyword[if] identifier[_PY_PATH_INSTALLED] keyword[and] identifier[isinstance] ( identifier[filepath_or_buffer] , identifier[LocalPath] ):
keyword[return] identifier[filepath_or_buffer] . identifier[strpath]
keyword[return] identifier[_expand_user] ( identifier[filepath_or_buffer] ) | def _stringify_path(filepath_or_buffer):
"""Attempt to convert a path-like object to a string.
Parameters
----------
filepath_or_buffer : object to be converted
Returns
-------
str_filepath_or_buffer : maybe a string version of the object
Notes
-----
Objects supporting the fspath protocol (python 3.6+) are coerced
according to its __fspath__ method.
For backwards compatibility with older pythons, pathlib.Path and
py.path objects are specially coerced.
Any other object is passed through unchanged, which includes bytes,
strings, buffers, or anything else that's not even path-like.
"""
try:
import pathlib
_PATHLIB_INSTALLED = True # depends on [control=['try'], data=[]]
except ImportError:
_PATHLIB_INSTALLED = False # depends on [control=['except'], data=[]]
try:
from py.path import local as LocalPath
_PY_PATH_INSTALLED = True # depends on [control=['try'], data=[]]
except ImportError:
_PY_PATH_INSTALLED = False # depends on [control=['except'], data=[]]
if hasattr(filepath_or_buffer, '__fspath__'):
return filepath_or_buffer.__fspath__() # depends on [control=['if'], data=[]]
if _PATHLIB_INSTALLED and isinstance(filepath_or_buffer, pathlib.Path):
return str(filepath_or_buffer) # depends on [control=['if'], data=[]]
if _PY_PATH_INSTALLED and isinstance(filepath_or_buffer, LocalPath):
return filepath_or_buffer.strpath # depends on [control=['if'], data=[]]
return _expand_user(filepath_or_buffer) |
def get_arcs(analysis):
"""
Hit stats for each branch.
Returns a flat list where every four values represent a branch:
1. line-number
2. block-number (not used)
3. branch-number
4. hits (we only get 1/0 from coverage.py)
"""
if not analysis.has_arcs():
return None
branch_lines = analysis.branch_lines()
branches = []
for l1, l2 in analysis.arcs_executed():
if l1 in branch_lines:
branches.extend((l1, 0, abs(l2), 1))
for l1, l2 in analysis.arcs_missing():
if l1 in branch_lines:
branches.extend((l1, 0, abs(l2), 0))
return branches | def function[get_arcs, parameter[analysis]]:
constant[
Hit stats for each branch.
Returns a flat list where every four values represent a branch:
1. line-number
2. block-number (not used)
3. branch-number
4. hits (we only get 1/0 from coverage.py)
]
if <ast.UnaryOp object at 0x7da2044c3910> begin[:]
return[constant[None]]
variable[branch_lines] assign[=] call[name[analysis].branch_lines, parameter[]]
variable[branches] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2044c2b60>, <ast.Name object at 0x7da2044c0a90>]]] in starred[call[name[analysis].arcs_executed, parameter[]]] begin[:]
if compare[name[l1] in name[branch_lines]] begin[:]
call[name[branches].extend, parameter[tuple[[<ast.Name object at 0x7da2044c2590>, <ast.Constant object at 0x7da2044c1090>, <ast.Call object at 0x7da2044c1450>, <ast.Constant object at 0x7da20c6c5cc0>]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6c5000>, <ast.Name object at 0x7da20c6c5330>]]] in starred[call[name[analysis].arcs_missing, parameter[]]] begin[:]
if compare[name[l1] in name[branch_lines]] begin[:]
call[name[branches].extend, parameter[tuple[[<ast.Name object at 0x7da20c6c7c40>, <ast.Constant object at 0x7da20c6c5540>, <ast.Call object at 0x7da20c6c41c0>, <ast.Constant object at 0x7da20c6c4d90>]]]]
return[name[branches]] | keyword[def] identifier[get_arcs] ( identifier[analysis] ):
literal[string]
keyword[if] keyword[not] identifier[analysis] . identifier[has_arcs] ():
keyword[return] keyword[None]
identifier[branch_lines] = identifier[analysis] . identifier[branch_lines] ()
identifier[branches] =[]
keyword[for] identifier[l1] , identifier[l2] keyword[in] identifier[analysis] . identifier[arcs_executed] ():
keyword[if] identifier[l1] keyword[in] identifier[branch_lines] :
identifier[branches] . identifier[extend] (( identifier[l1] , literal[int] , identifier[abs] ( identifier[l2] ), literal[int] ))
keyword[for] identifier[l1] , identifier[l2] keyword[in] identifier[analysis] . identifier[arcs_missing] ():
keyword[if] identifier[l1] keyword[in] identifier[branch_lines] :
identifier[branches] . identifier[extend] (( identifier[l1] , literal[int] , identifier[abs] ( identifier[l2] ), literal[int] ))
keyword[return] identifier[branches] | def get_arcs(analysis):
"""
Hit stats for each branch.
Returns a flat list where every four values represent a branch:
1. line-number
2. block-number (not used)
3. branch-number
4. hits (we only get 1/0 from coverage.py)
"""
if not analysis.has_arcs():
return None # depends on [control=['if'], data=[]]
branch_lines = analysis.branch_lines()
branches = []
for (l1, l2) in analysis.arcs_executed():
if l1 in branch_lines:
branches.extend((l1, 0, abs(l2), 1)) # depends on [control=['if'], data=['l1']] # depends on [control=['for'], data=[]]
for (l1, l2) in analysis.arcs_missing():
if l1 in branch_lines:
branches.extend((l1, 0, abs(l2), 0)) # depends on [control=['if'], data=['l1']] # depends on [control=['for'], data=[]]
return branches |
def handle_class(signature_node, module, object_name, cache):
"""
Styles ``autoclass`` entries.
Adds ``abstract`` prefix to abstract classes.
"""
class_ = getattr(module, object_name, None)
if class_ is None:
return
if class_ not in cache:
cache[class_] = {}
attributes = inspect.classify_class_attrs(class_)
for attribute in attributes:
cache[class_][attribute.name] = attribute
if inspect.isabstract(class_):
emphasis = nodes.emphasis("abstract ", "abstract ", classes=["property"])
signature_node.insert(0, emphasis) | def function[handle_class, parameter[signature_node, module, object_name, cache]]:
constant[
Styles ``autoclass`` entries.
Adds ``abstract`` prefix to abstract classes.
]
variable[class_] assign[=] call[name[getattr], parameter[name[module], name[object_name], constant[None]]]
if compare[name[class_] is constant[None]] begin[:]
return[None]
if compare[name[class_] <ast.NotIn object at 0x7da2590d7190> name[cache]] begin[:]
call[name[cache]][name[class_]] assign[=] dictionary[[], []]
variable[attributes] assign[=] call[name[inspect].classify_class_attrs, parameter[name[class_]]]
for taget[name[attribute]] in starred[name[attributes]] begin[:]
call[call[name[cache]][name[class_]]][name[attribute].name] assign[=] name[attribute]
if call[name[inspect].isabstract, parameter[name[class_]]] begin[:]
variable[emphasis] assign[=] call[name[nodes].emphasis, parameter[constant[abstract ], constant[abstract ]]]
call[name[signature_node].insert, parameter[constant[0], name[emphasis]]] | keyword[def] identifier[handle_class] ( identifier[signature_node] , identifier[module] , identifier[object_name] , identifier[cache] ):
literal[string]
identifier[class_] = identifier[getattr] ( identifier[module] , identifier[object_name] , keyword[None] )
keyword[if] identifier[class_] keyword[is] keyword[None] :
keyword[return]
keyword[if] identifier[class_] keyword[not] keyword[in] identifier[cache] :
identifier[cache] [ identifier[class_] ]={}
identifier[attributes] = identifier[inspect] . identifier[classify_class_attrs] ( identifier[class_] )
keyword[for] identifier[attribute] keyword[in] identifier[attributes] :
identifier[cache] [ identifier[class_] ][ identifier[attribute] . identifier[name] ]= identifier[attribute]
keyword[if] identifier[inspect] . identifier[isabstract] ( identifier[class_] ):
identifier[emphasis] = identifier[nodes] . identifier[emphasis] ( literal[string] , literal[string] , identifier[classes] =[ literal[string] ])
identifier[signature_node] . identifier[insert] ( literal[int] , identifier[emphasis] ) | def handle_class(signature_node, module, object_name, cache):
"""
Styles ``autoclass`` entries.
Adds ``abstract`` prefix to abstract classes.
"""
class_ = getattr(module, object_name, None)
if class_ is None:
return # depends on [control=['if'], data=[]]
if class_ not in cache:
cache[class_] = {}
attributes = inspect.classify_class_attrs(class_)
for attribute in attributes:
cache[class_][attribute.name] = attribute # depends on [control=['for'], data=['attribute']] # depends on [control=['if'], data=['class_', 'cache']]
if inspect.isabstract(class_):
emphasis = nodes.emphasis('abstract ', 'abstract ', classes=['property'])
signature_node.insert(0, emphasis) # depends on [control=['if'], data=[]] |
def create_free_shipping(cls, free_shipping, **kwargs):
"""Create FreeShipping
Create a new FreeShipping
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_free_shipping(free_shipping, async=True)
>>> result = thread.get()
:param async bool
:param FreeShipping free_shipping: Attributes of freeShipping to create (required)
:return: FreeShipping
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_free_shipping_with_http_info(free_shipping, **kwargs)
else:
(data) = cls._create_free_shipping_with_http_info(free_shipping, **kwargs)
return data | def function[create_free_shipping, parameter[cls, free_shipping]]:
constant[Create FreeShipping
Create a new FreeShipping
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_free_shipping(free_shipping, async=True)
>>> result = thread.get()
:param async bool
:param FreeShipping free_shipping: Attributes of freeShipping to create (required)
:return: FreeShipping
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._create_free_shipping_with_http_info, parameter[name[free_shipping]]]] | keyword[def] identifier[create_free_shipping] ( identifier[cls] , identifier[free_shipping] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_create_free_shipping_with_http_info] ( identifier[free_shipping] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_create_free_shipping_with_http_info] ( identifier[free_shipping] ,** identifier[kwargs] )
keyword[return] identifier[data] | def create_free_shipping(cls, free_shipping, **kwargs):
"""Create FreeShipping
Create a new FreeShipping
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_free_shipping(free_shipping, async=True)
>>> result = thread.get()
:param async bool
:param FreeShipping free_shipping: Attributes of freeShipping to create (required)
:return: FreeShipping
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_free_shipping_with_http_info(free_shipping, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._create_free_shipping_with_http_info(free_shipping, **kwargs)
return data |
def _set_serializer_by_mime_type(self, mime_type):
"""
:param mime_type:
:return:
used by content_type_set to set get a reference to the appropriate serializer
"""
# ignore if binary response
if isinstance(self._app_iter, BinaryResponse):
self.logger.info("ignoring setting serializer for binary response")
return
for available_serializer in self._serializers:
if available_serializer.content_type() == mime_type:
self._selected_serializer = available_serializer
self.logger.info("set serializer for mime type: %s" % mime_type)
return
self.logger.info("could not find serializer for mime type: %s" % mime_type)
raise exception.UnsupportedVocabularyError(mime_type, self.supported_mime_types_str) | def function[_set_serializer_by_mime_type, parameter[self, mime_type]]:
constant[
:param mime_type:
:return:
used by content_type_set to set get a reference to the appropriate serializer
]
if call[name[isinstance], parameter[name[self]._app_iter, name[BinaryResponse]]] begin[:]
call[name[self].logger.info, parameter[constant[ignoring setting serializer for binary response]]]
return[None]
for taget[name[available_serializer]] in starred[name[self]._serializers] begin[:]
if compare[call[name[available_serializer].content_type, parameter[]] equal[==] name[mime_type]] begin[:]
name[self]._selected_serializer assign[=] name[available_serializer]
call[name[self].logger.info, parameter[binary_operation[constant[set serializer for mime type: %s] <ast.Mod object at 0x7da2590d6920> name[mime_type]]]]
return[None]
call[name[self].logger.info, parameter[binary_operation[constant[could not find serializer for mime type: %s] <ast.Mod object at 0x7da2590d6920> name[mime_type]]]]
<ast.Raise object at 0x7da1b0a2f610> | keyword[def] identifier[_set_serializer_by_mime_type] ( identifier[self] , identifier[mime_type] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_app_iter] , identifier[BinaryResponse] ):
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
keyword[return]
keyword[for] identifier[available_serializer] keyword[in] identifier[self] . identifier[_serializers] :
keyword[if] identifier[available_serializer] . identifier[content_type] ()== identifier[mime_type] :
identifier[self] . identifier[_selected_serializer] = identifier[available_serializer]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[mime_type] )
keyword[return]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[mime_type] )
keyword[raise] identifier[exception] . identifier[UnsupportedVocabularyError] ( identifier[mime_type] , identifier[self] . identifier[supported_mime_types_str] ) | def _set_serializer_by_mime_type(self, mime_type):
"""
:param mime_type:
:return:
used by content_type_set to set get a reference to the appropriate serializer
"""
# ignore if binary response
if isinstance(self._app_iter, BinaryResponse):
self.logger.info('ignoring setting serializer for binary response')
return # depends on [control=['if'], data=[]]
for available_serializer in self._serializers:
if available_serializer.content_type() == mime_type:
self._selected_serializer = available_serializer
self.logger.info('set serializer for mime type: %s' % mime_type)
return # depends on [control=['if'], data=['mime_type']] # depends on [control=['for'], data=['available_serializer']]
self.logger.info('could not find serializer for mime type: %s' % mime_type)
raise exception.UnsupportedVocabularyError(mime_type, self.supported_mime_types_str) |
def _set_version(self, v, load=False):
"""
Setter method for version, mapped from YANG variable /interface/port_channel/hide_vrrp_holer/vrrp/version (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_version is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_version() directly.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'2..3']}), is_leaf=True, yang_name="version", rest_name="version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set VRRP version 2/3', u'key-default': u'2', u'cli-expose-key-name': None, u'cli-hide-in-submode': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='uint8', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """version must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'2..3']}), is_leaf=True, yang_name="version", rest_name="version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set VRRP version 2/3', u'key-default': u'2', u'cli-expose-key-name': None, u'cli-hide-in-submode': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='uint8', is_config=True)""",
})
self.__version = t
if hasattr(self, '_set'):
self._set() | def function[_set_version, parameter[self, v, load]]:
constant[
Setter method for version, mapped from YANG variable /interface/port_channel/hide_vrrp_holer/vrrp/version (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_version is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_version() directly.
]
variable[parent] assign[=] call[name[getattr], parameter[name[self], constant[_parent], constant[None]]]
if <ast.BoolOp object at 0x7da1b256d120> begin[:]
<ast.Raise object at 0x7da1b256df90>
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b256ec50>
name[self].__version assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_version] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
identifier[parent] = identifier[getattr] ( identifier[self] , literal[string] , keyword[None] )
keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] keyword[and] identifier[load] keyword[is] keyword[False] :
keyword[raise] identifier[AttributeError] ( literal[string] +
literal[string] )
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[int] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] }}, identifier[is_keyval] = keyword[True] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__version] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_version(self, v, load=False):
"""
Setter method for version, mapped from YANG variable /interface/port_channel/hide_vrrp_holer/vrrp/version (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_version is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_version() directly.
"""
parent = getattr(self, '_parent', None)
if parent is not None and load is False:
raise AttributeError('Cannot set keys directly when' + ' within an instantiated list') # depends on [control=['if'], data=[]]
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'2..3']}), is_leaf=True, yang_name='version', rest_name='version', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set VRRP version 2/3', u'key-default': u'2', u'cli-expose-key-name': None, u'cli-hide-in-submode': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='uint8', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'version must be of a type compatible with uint8', 'defined-type': 'uint8', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={\'range\': [\'0..255\']}, int_size=8), restriction_dict={\'range\': [u\'2..3\']}), is_leaf=True, yang_name="version", rest_name="version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Set VRRP version 2/3\', u\'key-default\': u\'2\', u\'cli-expose-key-name\': None, u\'cli-hide-in-submode\': None}}, is_keyval=True, namespace=\'urn:brocade.com:mgmt:brocade-vrrp\', defining_module=\'brocade-vrrp\', yang_type=\'uint8\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__version = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def bulk_insert(self, resource, docs, **kwargs):
"""Bulk insert documents."""
kwargs.update(self._es_args(resource))
parent_type = self._get_parent_type(resource)
if parent_type:
for doc in docs:
if doc.get(parent_type.get('field')):
doc['_parent'] = doc.get(parent_type.get('field'))
res = bulk(self.elastic(resource), docs, stats_only=False, **kwargs)
self._refresh_resource_index(resource)
return res | def function[bulk_insert, parameter[self, resource, docs]]:
constant[Bulk insert documents.]
call[name[kwargs].update, parameter[call[name[self]._es_args, parameter[name[resource]]]]]
variable[parent_type] assign[=] call[name[self]._get_parent_type, parameter[name[resource]]]
if name[parent_type] begin[:]
for taget[name[doc]] in starred[name[docs]] begin[:]
if call[name[doc].get, parameter[call[name[parent_type].get, parameter[constant[field]]]]] begin[:]
call[name[doc]][constant[_parent]] assign[=] call[name[doc].get, parameter[call[name[parent_type].get, parameter[constant[field]]]]]
variable[res] assign[=] call[name[bulk], parameter[call[name[self].elastic, parameter[name[resource]]], name[docs]]]
call[name[self]._refresh_resource_index, parameter[name[resource]]]
return[name[res]] | keyword[def] identifier[bulk_insert] ( identifier[self] , identifier[resource] , identifier[docs] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[update] ( identifier[self] . identifier[_es_args] ( identifier[resource] ))
identifier[parent_type] = identifier[self] . identifier[_get_parent_type] ( identifier[resource] )
keyword[if] identifier[parent_type] :
keyword[for] identifier[doc] keyword[in] identifier[docs] :
keyword[if] identifier[doc] . identifier[get] ( identifier[parent_type] . identifier[get] ( literal[string] )):
identifier[doc] [ literal[string] ]= identifier[doc] . identifier[get] ( identifier[parent_type] . identifier[get] ( literal[string] ))
identifier[res] = identifier[bulk] ( identifier[self] . identifier[elastic] ( identifier[resource] ), identifier[docs] , identifier[stats_only] = keyword[False] ,** identifier[kwargs] )
identifier[self] . identifier[_refresh_resource_index] ( identifier[resource] )
keyword[return] identifier[res] | def bulk_insert(self, resource, docs, **kwargs):
"""Bulk insert documents."""
kwargs.update(self._es_args(resource))
parent_type = self._get_parent_type(resource)
if parent_type:
for doc in docs:
if doc.get(parent_type.get('field')):
doc['_parent'] = doc.get(parent_type.get('field')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['doc']] # depends on [control=['if'], data=[]]
res = bulk(self.elastic(resource), docs, stats_only=False, **kwargs)
self._refresh_resource_index(resource)
return res |
def _udf_get_file_from_iso_fp(self, outfp, blocksize, udf_path):
# type: (BinaryIO, int, bytes) -> None
'''
An internal method to fetch a single UDF file from the ISO and write it
out to the file object.
Parameters:
outfp - The file object to write data to.
blocksize - The number of bytes in each transfer.
udf_path - The absolute UDF path to lookup on the ISO.
Returns:
Nothing.
'''
if self.udf_root is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot fetch a udf_path from a non-UDF ISO')
(ident_unused, found_file_entry) = self._find_udf_record(udf_path)
if found_file_entry is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot get the contents of an empty UDF File Entry')
if not found_file_entry.is_file():
raise pycdlibexception.PyCdlibInvalidInput('Can only write out a file')
if found_file_entry.inode is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot write out an entry without data')
if found_file_entry.get_data_length() > 0:
with inode.InodeOpenData(found_file_entry.inode, self.pvd.logical_block_size()) as (data_fp, data_len):
utils.copy_data(data_len, blocksize, data_fp, outfp) | def function[_udf_get_file_from_iso_fp, parameter[self, outfp, blocksize, udf_path]]:
constant[
An internal method to fetch a single UDF file from the ISO and write it
out to the file object.
Parameters:
outfp - The file object to write data to.
blocksize - The number of bytes in each transfer.
udf_path - The absolute UDF path to lookup on the ISO.
Returns:
Nothing.
]
if compare[name[self].udf_root is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0ff2ec0>
<ast.Tuple object at 0x7da1b0ff1ba0> assign[=] call[name[self]._find_udf_record, parameter[name[udf_path]]]
if compare[name[found_file_entry] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0ff22c0>
if <ast.UnaryOp object at 0x7da1b0ff2c80> begin[:]
<ast.Raise object at 0x7da1b0ff30a0>
if compare[name[found_file_entry].inode is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0ff2380>
if compare[call[name[found_file_entry].get_data_length, parameter[]] greater[>] constant[0]] begin[:]
with call[name[inode].InodeOpenData, parameter[name[found_file_entry].inode, call[name[self].pvd.logical_block_size, parameter[]]]] begin[:]
call[name[utils].copy_data, parameter[name[data_len], name[blocksize], name[data_fp], name[outfp]]] | keyword[def] identifier[_udf_get_file_from_iso_fp] ( identifier[self] , identifier[outfp] , identifier[blocksize] , identifier[udf_path] ):
literal[string]
keyword[if] identifier[self] . identifier[udf_root] keyword[is] keyword[None] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
( identifier[ident_unused] , identifier[found_file_entry] )= identifier[self] . identifier[_find_udf_record] ( identifier[udf_path] )
keyword[if] identifier[found_file_entry] keyword[is] keyword[None] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[if] keyword[not] identifier[found_file_entry] . identifier[is_file] ():
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[if] identifier[found_file_entry] . identifier[inode] keyword[is] keyword[None] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[if] identifier[found_file_entry] . identifier[get_data_length] ()> literal[int] :
keyword[with] identifier[inode] . identifier[InodeOpenData] ( identifier[found_file_entry] . identifier[inode] , identifier[self] . identifier[pvd] . identifier[logical_block_size] ()) keyword[as] ( identifier[data_fp] , identifier[data_len] ):
identifier[utils] . identifier[copy_data] ( identifier[data_len] , identifier[blocksize] , identifier[data_fp] , identifier[outfp] ) | def _udf_get_file_from_iso_fp(self, outfp, blocksize, udf_path):
# type: (BinaryIO, int, bytes) -> None
'\n An internal method to fetch a single UDF file from the ISO and write it\n out to the file object.\n\n Parameters:\n outfp - The file object to write data to.\n blocksize - The number of bytes in each transfer.\n udf_path - The absolute UDF path to lookup on the ISO.\n Returns:\n Nothing.\n '
if self.udf_root is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot fetch a udf_path from a non-UDF ISO') # depends on [control=['if'], data=[]]
(ident_unused, found_file_entry) = self._find_udf_record(udf_path)
if found_file_entry is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot get the contents of an empty UDF File Entry') # depends on [control=['if'], data=[]]
if not found_file_entry.is_file():
raise pycdlibexception.PyCdlibInvalidInput('Can only write out a file') # depends on [control=['if'], data=[]]
if found_file_entry.inode is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot write out an entry without data') # depends on [control=['if'], data=[]]
if found_file_entry.get_data_length() > 0:
with inode.InodeOpenData(found_file_entry.inode, self.pvd.logical_block_size()) as (data_fp, data_len):
utils.copy_data(data_len, blocksize, data_fp, outfp) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] |
def set_min_priority(self, infohash_list):
"""
Set torrents to minimum priority level.
:param infohash_list: Single or list() of infohashes.
"""
data = self._process_infohash_list(infohash_list)
return self._post('command/bottomPrio', data=data) | def function[set_min_priority, parameter[self, infohash_list]]:
constant[
Set torrents to minimum priority level.
:param infohash_list: Single or list() of infohashes.
]
variable[data] assign[=] call[name[self]._process_infohash_list, parameter[name[infohash_list]]]
return[call[name[self]._post, parameter[constant[command/bottomPrio]]]] | keyword[def] identifier[set_min_priority] ( identifier[self] , identifier[infohash_list] ):
literal[string]
identifier[data] = identifier[self] . identifier[_process_infohash_list] ( identifier[infohash_list] )
keyword[return] identifier[self] . identifier[_post] ( literal[string] , identifier[data] = identifier[data] ) | def set_min_priority(self, infohash_list):
"""
Set torrents to minimum priority level.
:param infohash_list: Single or list() of infohashes.
"""
data = self._process_infohash_list(infohash_list)
return self._post('command/bottomPrio', data=data) |
def check_complicance(self):
"""Check compliance with Media RSS Specification, Version 1.5.1.
see http://www.rssboard.org/media-rss
Raises AttributeError on error.
"""
# check Media RSS requirement: one of the following elements is
# required: media_group | media_content | media_player | media_peerLink
# | media_location. We do the check only if any media_... element is
# set to allow non media feeds
if(any([ma for ma in vars(self)
if ma.startswith('media_') and getattr(self, ma)])
and not self.media_group
and not self.media_content
and not self.media_player
and not self.media_peerLink
and not self.media_location
):
raise AttributeError(
"Using media elements requires the specification of at least "
"one of the following elements: 'media_group', "
"'media_content', 'media_player', 'media_peerLink' or "
"'media_location'.")
# check Media RSS requirement: if media:player is missing all
# media_content elements need to have url attributes.
if not self.media_player:
if self.media_content:
# check if all media_content elements have a URL set
if isinstance(self.media_content, list):
if not all([False for mc in self.media_content if
'url' not in mc.element_attrs]):
raise AttributeError(
"MediaRSSItems require a media_player attribute "
"if a media_content has no url set.")
else:
if not self.media_content.element_attrs['url']:
raise AttributeError(
"MediaRSSItems require a media_player attribute "
"if a media_content has no url set.")
pass
elif self.media_group:
# check media groups without player if its media_content
# elements have a URL set
raise NotImplementedError(
"MediaRSSItem: media_group check not implemented yet.") | def function[check_complicance, parameter[self]]:
constant[Check compliance with Media RSS Specification, Version 1.5.1.
see http://www.rssboard.org/media-rss
Raises AttributeError on error.
]
if <ast.BoolOp object at 0x7da20e955c00> begin[:]
<ast.Raise object at 0x7da20e9567a0>
if <ast.UnaryOp object at 0x7da20e9550f0> begin[:]
if name[self].media_content begin[:]
if call[name[isinstance], parameter[name[self].media_content, name[list]]] begin[:]
if <ast.UnaryOp object at 0x7da20e956560> begin[:]
<ast.Raise object at 0x7da20e9556c0>
pass | keyword[def] identifier[check_complicance] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[any] ([ identifier[ma] keyword[for] identifier[ma] keyword[in] identifier[vars] ( identifier[self] )
keyword[if] identifier[ma] . identifier[startswith] ( literal[string] ) keyword[and] identifier[getattr] ( identifier[self] , identifier[ma] )])
keyword[and] keyword[not] identifier[self] . identifier[media_group]
keyword[and] keyword[not] identifier[self] . identifier[media_content]
keyword[and] keyword[not] identifier[self] . identifier[media_player]
keyword[and] keyword[not] identifier[self] . identifier[media_peerLink]
keyword[and] keyword[not] identifier[self] . identifier[media_location]
):
keyword[raise] identifier[AttributeError] (
literal[string]
literal[string]
literal[string]
literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[media_player] :
keyword[if] identifier[self] . identifier[media_content] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[media_content] , identifier[list] ):
keyword[if] keyword[not] identifier[all] ([ keyword[False] keyword[for] identifier[mc] keyword[in] identifier[self] . identifier[media_content] keyword[if]
literal[string] keyword[not] keyword[in] identifier[mc] . identifier[element_attrs] ]):
keyword[raise] identifier[AttributeError] (
literal[string]
literal[string] )
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[media_content] . identifier[element_attrs] [ literal[string] ]:
keyword[raise] identifier[AttributeError] (
literal[string]
literal[string] )
keyword[pass]
keyword[elif] identifier[self] . identifier[media_group] :
keyword[raise] identifier[NotImplementedError] (
literal[string] ) | def check_complicance(self):
"""Check compliance with Media RSS Specification, Version 1.5.1.
see http://www.rssboard.org/media-rss
Raises AttributeError on error.
"""
# check Media RSS requirement: one of the following elements is
# required: media_group | media_content | media_player | media_peerLink
# | media_location. We do the check only if any media_... element is
# set to allow non media feeds
if any([ma for ma in vars(self) if ma.startswith('media_') and getattr(self, ma)]) and (not self.media_group) and (not self.media_content) and (not self.media_player) and (not self.media_peerLink) and (not self.media_location):
raise AttributeError("Using media elements requires the specification of at least one of the following elements: 'media_group', 'media_content', 'media_player', 'media_peerLink' or 'media_location'.") # depends on [control=['if'], data=[]]
# check Media RSS requirement: if media:player is missing all
# media_content elements need to have url attributes.
if not self.media_player:
if self.media_content:
# check if all media_content elements have a URL set
if isinstance(self.media_content, list):
if not all([False for mc in self.media_content if 'url' not in mc.element_attrs]):
raise AttributeError('MediaRSSItems require a media_player attribute if a media_content has no url set.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not self.media_content.element_attrs['url']:
raise AttributeError('MediaRSSItems require a media_player attribute if a media_content has no url set.') # depends on [control=['if'], data=[]]
pass # depends on [control=['if'], data=[]]
elif self.media_group:
# check media groups without player if its media_content
# elements have a URL set
raise NotImplementedError('MediaRSSItem: media_group check not implemented yet.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def cond_remove_all(ol,**kwargs):
'''
from elist.elist import *
ol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]
id(ol)
def afterCH(ele,ch):
cond = (ord(str(ele)) > ord(ch))
return(cond)
new = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'])
ol
new
id(ol)
id(new)
####
ol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]
id(ol)
rslt = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'],mode='original')
ol
rslt
id(ol)
id(rslt)
'''
cond_func = kwargs['cond_func']
if('cond_func_args' in kwargs):
cond_func_args = kwargs['cond_func_args']
else:
cond_func_args = []
if('mode' in kwargs):
mode = kwargs["mode"]
else:
mode = "new"
new = copy.deepcopy(ol)
selected = find_all(new,cond_func,*cond_func_args)
selected_indexes = array_map(selected,lambda ele:ele['index'])
new = pop_indexes(new,selected_indexes)['list']
if(mode == "new"):
return(new)
else:
ol.clear()
ol.extend(new)
return(ol) | def function[cond_remove_all, parameter[ol]]:
constant[
from elist.elist import *
ol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]
id(ol)
def afterCH(ele,ch):
cond = (ord(str(ele)) > ord(ch))
return(cond)
new = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'])
ol
new
id(ol)
id(new)
####
ol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]
id(ol)
rslt = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'],mode='original')
ol
rslt
id(ol)
id(rslt)
]
variable[cond_func] assign[=] call[name[kwargs]][constant[cond_func]]
if compare[constant[cond_func_args] in name[kwargs]] begin[:]
variable[cond_func_args] assign[=] call[name[kwargs]][constant[cond_func_args]]
if compare[constant[mode] in name[kwargs]] begin[:]
variable[mode] assign[=] call[name[kwargs]][constant[mode]]
variable[new] assign[=] call[name[copy].deepcopy, parameter[name[ol]]]
variable[selected] assign[=] call[name[find_all], parameter[name[new], name[cond_func], <ast.Starred object at 0x7da1affd6d70>]]
variable[selected_indexes] assign[=] call[name[array_map], parameter[name[selected], <ast.Lambda object at 0x7da1affd49d0>]]
variable[new] assign[=] call[call[name[pop_indexes], parameter[name[new], name[selected_indexes]]]][constant[list]]
if compare[name[mode] equal[==] constant[new]] begin[:]
return[name[new]] | keyword[def] identifier[cond_remove_all] ( identifier[ol] ,** identifier[kwargs] ):
literal[string]
identifier[cond_func] = identifier[kwargs] [ literal[string] ]
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[cond_func_args] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[cond_func_args] =[]
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[mode] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[mode] = literal[string]
identifier[new] = identifier[copy] . identifier[deepcopy] ( identifier[ol] )
identifier[selected] = identifier[find_all] ( identifier[new] , identifier[cond_func] ,* identifier[cond_func_args] )
identifier[selected_indexes] = identifier[array_map] ( identifier[selected] , keyword[lambda] identifier[ele] : identifier[ele] [ literal[string] ])
identifier[new] = identifier[pop_indexes] ( identifier[new] , identifier[selected_indexes] )[ literal[string] ]
keyword[if] ( identifier[mode] == literal[string] ):
keyword[return] ( identifier[new] )
keyword[else] :
identifier[ol] . identifier[clear] ()
identifier[ol] . identifier[extend] ( identifier[new] )
keyword[return] ( identifier[ol] ) | def cond_remove_all(ol, **kwargs):
"""
from elist.elist import *
ol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]
id(ol)
def afterCH(ele,ch):
cond = (ord(str(ele)) > ord(ch))
return(cond)
new = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'])
ol
new
id(ol)
id(new)
####
ol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]
id(ol)
rslt = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'],mode='original')
ol
rslt
id(ol)
id(rslt)
"""
cond_func = kwargs['cond_func']
if 'cond_func_args' in kwargs:
cond_func_args = kwargs['cond_func_args'] # depends on [control=['if'], data=['kwargs']]
else:
cond_func_args = []
if 'mode' in kwargs:
mode = kwargs['mode'] # depends on [control=['if'], data=['kwargs']]
else:
mode = 'new'
new = copy.deepcopy(ol)
selected = find_all(new, cond_func, *cond_func_args)
selected_indexes = array_map(selected, lambda ele: ele['index'])
new = pop_indexes(new, selected_indexes)['list']
if mode == 'new':
return new # depends on [control=['if'], data=[]]
else:
ol.clear()
ol.extend(new)
return ol |
def replace(text, old, new, count=None, strip=False):
'''
Replace an ``old`` subset of ``text`` with ``new``.
``old`` type may be either a string or regular expression.
If ``strip``, remove all leading/trailing whitespace.
If ``count``, replace the specified number of occurence, otherwise replace all.
'''
if is_string(old):
text = text.replace(old, new, -1 if count is None else count)
else:
text = old.sub(new, text, 0 if count is None else count)
if strip:
text = text.strip(None if strip == True else strip)
return text | def function[replace, parameter[text, old, new, count, strip]]:
constant[
Replace an ``old`` subset of ``text`` with ``new``.
``old`` type may be either a string or regular expression.
If ``strip``, remove all leading/trailing whitespace.
If ``count``, replace the specified number of occurence, otherwise replace all.
]
if call[name[is_string], parameter[name[old]]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[name[old], name[new], <ast.IfExp object at 0x7da1b0a489a0>]]
if name[strip] begin[:]
variable[text] assign[=] call[name[text].strip, parameter[<ast.IfExp object at 0x7da1b0ae0610>]]
return[name[text]] | keyword[def] identifier[replace] ( identifier[text] , identifier[old] , identifier[new] , identifier[count] = keyword[None] , identifier[strip] = keyword[False] ):
literal[string]
keyword[if] identifier[is_string] ( identifier[old] ):
identifier[text] = identifier[text] . identifier[replace] ( identifier[old] , identifier[new] ,- literal[int] keyword[if] identifier[count] keyword[is] keyword[None] keyword[else] identifier[count] )
keyword[else] :
identifier[text] = identifier[old] . identifier[sub] ( identifier[new] , identifier[text] , literal[int] keyword[if] identifier[count] keyword[is] keyword[None] keyword[else] identifier[count] )
keyword[if] identifier[strip] :
identifier[text] = identifier[text] . identifier[strip] ( keyword[None] keyword[if] identifier[strip] == keyword[True] keyword[else] identifier[strip] )
keyword[return] identifier[text] | def replace(text, old, new, count=None, strip=False):
"""
Replace an ``old`` subset of ``text`` with ``new``.
``old`` type may be either a string or regular expression.
If ``strip``, remove all leading/trailing whitespace.
If ``count``, replace the specified number of occurence, otherwise replace all.
"""
if is_string(old):
text = text.replace(old, new, -1 if count is None else count) # depends on [control=['if'], data=[]]
else:
text = old.sub(new, text, 0 if count is None else count)
if strip:
text = text.strip(None if strip == True else strip) # depends on [control=['if'], data=[]]
return text |
def utc_datetime(dt=None, local_value=True):
    """ Normalize a datetime to an aware UTC datetime.
    :param dt: datetime to convert; when None, the current UTC time is returned
    :param local_value: when True, a naive ``dt`` is interpreted as being in the
    system's local timezone; when False, a naive ``dt`` is taken as already UTC
    :return: timezone-aware datetime in UTC
    """
    # TODO: rename local_value to local_tz or in_local_tz
    if dt is None:
        return datetime.now(tz=timezone.utc)
    # Aware datetimes carry an offset; just convert them.
    if dt.utcoffset() is not None:
        return dt.astimezone(timezone.utc)
    # Naive datetime: decide which timezone it was expressed in.
    if local_value is False:
        return dt.replace(tzinfo=timezone.utc)
    return dt.replace(tzinfo=local_tz()).astimezone(timezone.utc)
constant[ Convert local datetime and/or datetime without timezone information to UTC datetime with timezone
information.
:param dt: local datetime to convert. If is None, then system datetime value is used
:param local_value: whether dt is a datetime in system timezone or UTC datetime without timezone information
:return: datetime in UTC with tz set
]
if compare[name[dt] is constant[None]] begin[:]
return[call[name[datetime].now, parameter[]]]
variable[result] assign[=] name[dt]
if compare[call[name[result].utcoffset, parameter[]] is constant[None]] begin[:]
if compare[name[local_value] is constant[False]] begin[:]
return[call[name[result].replace, parameter[]]]
return[call[name[result].astimezone, parameter[name[timezone].utc]]] | keyword[def] identifier[utc_datetime] ( identifier[dt] = keyword[None] , identifier[local_value] = keyword[True] ):
literal[string]
keyword[if] identifier[dt] keyword[is] keyword[None] :
keyword[return] identifier[datetime] . identifier[now] ( identifier[tz] = identifier[timezone] . identifier[utc] )
identifier[result] = identifier[dt]
keyword[if] identifier[result] . identifier[utcoffset] () keyword[is] keyword[None] :
keyword[if] identifier[local_value] keyword[is] keyword[False] :
keyword[return] identifier[result] . identifier[replace] ( identifier[tzinfo] = identifier[timezone] . identifier[utc] )
keyword[else] :
identifier[result] = identifier[result] . identifier[replace] ( identifier[tzinfo] = identifier[local_tz] ())
keyword[return] identifier[result] . identifier[astimezone] ( identifier[timezone] . identifier[utc] ) | def utc_datetime(dt=None, local_value=True):
""" Convert local datetime and/or datetime without timezone information to UTC datetime with timezone
information.
:param dt: local datetime to convert. If is None, then system datetime value is used
:param local_value: whether dt is a datetime in system timezone or UTC datetime without timezone information
:return: datetime in UTC with tz set
""" # TODO: rename local_value to local_tz or in_local_tz
if dt is None:
return datetime.now(tz=timezone.utc) # depends on [control=['if'], data=[]]
result = dt
if result.utcoffset() is None:
if local_value is False:
return result.replace(tzinfo=timezone.utc) # depends on [control=['if'], data=[]]
else:
result = result.replace(tzinfo=local_tz()) # depends on [control=['if'], data=[]]
return result.astimezone(timezone.utc) |
def pipe_item_split(tokens, loc):
    """Classify a parsed pipe item and unpack its components.
    Returns a (type, split) pair where split is
    - (expr,) for an expression,
    - (func, pos_args, kwd_args) for a partial,
    - (name, args) for an attribute access or method call, and
    - (op, args) for an itemgetter."""
    # artificial (list) tokens can only ever represent a plain expression
    if isinstance(tokens, list) or "expr" in tokens:
        internal_assert(len(tokens) == 1, "invalid expr pipe item tokens", tokens)
        return "expr", (tokens[0],)
    if "partial" in tokens:
        func, args = tokens
        pos_args, star_args, kwd_args, dubstar_args = split_function_call(args, loc)
        return "partial", (func, join_args(pos_args, star_args), join_args(kwd_args, dubstar_args))
    if "attrgetter" in tokens:
        name, args = attrgetter_atom_split(tokens)
        return "attrgetter", (name, args)
    if "itemgetter" in tokens:
        op, args = tokens
        return "itemgetter", (op, args)
    raise CoconutInternalException("invalid pipe item tokens", tokens)
constant[Process a pipe item, which could be a partial, an attribute access, a method call, or an expression.
Return (type, split) where split is
- (expr,) for expression,
- (func, pos_args, kwd_args) for partial,
- (name, args) for attr/method, and
- (op, args) for itemgetter.]
if <ast.BoolOp object at 0x7da2047e9ea0> begin[:]
call[name[internal_assert], parameter[compare[call[name[len], parameter[name[tokens]]] equal[==] constant[1]], constant[invalid expr pipe item tokens], name[tokens]]]
return[tuple[[<ast.Constant object at 0x7da2047e8fd0>, <ast.Tuple object at 0x7da2047e98d0>]]] | keyword[def] identifier[pipe_item_split] ( identifier[tokens] , identifier[loc] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[tokens] , identifier[list] ) keyword[or] literal[string] keyword[in] identifier[tokens] :
identifier[internal_assert] ( identifier[len] ( identifier[tokens] )== literal[int] , literal[string] , identifier[tokens] )
keyword[return] literal[string] ,( identifier[tokens] [ literal[int] ],)
keyword[elif] literal[string] keyword[in] identifier[tokens] :
identifier[func] , identifier[args] = identifier[tokens]
identifier[pos_args] , identifier[star_args] , identifier[kwd_args] , identifier[dubstar_args] = identifier[split_function_call] ( identifier[args] , identifier[loc] )
keyword[return] literal[string] ,( identifier[func] , identifier[join_args] ( identifier[pos_args] , identifier[star_args] ), identifier[join_args] ( identifier[kwd_args] , identifier[dubstar_args] ))
keyword[elif] literal[string] keyword[in] identifier[tokens] :
identifier[name] , identifier[args] = identifier[attrgetter_atom_split] ( identifier[tokens] )
keyword[return] literal[string] ,( identifier[name] , identifier[args] )
keyword[elif] literal[string] keyword[in] identifier[tokens] :
identifier[op] , identifier[args] = identifier[tokens]
keyword[return] literal[string] ,( identifier[op] , identifier[args] )
keyword[else] :
keyword[raise] identifier[CoconutInternalException] ( literal[string] , identifier[tokens] ) | def pipe_item_split(tokens, loc):
"""Process a pipe item, which could be a partial, an attribute access, a method call, or an expression.
Return (type, split) where split is
- (expr,) for expression,
- (func, pos_args, kwd_args) for partial,
- (name, args) for attr/method, and
- (op, args) for itemgetter."""
# list implies artificial tokens, which must be expr
if isinstance(tokens, list) or 'expr' in tokens:
internal_assert(len(tokens) == 1, 'invalid expr pipe item tokens', tokens)
return ('expr', (tokens[0],)) # depends on [control=['if'], data=[]]
elif 'partial' in tokens:
(func, args) = tokens
(pos_args, star_args, kwd_args, dubstar_args) = split_function_call(args, loc)
return ('partial', (func, join_args(pos_args, star_args), join_args(kwd_args, dubstar_args))) # depends on [control=['if'], data=['tokens']]
elif 'attrgetter' in tokens:
(name, args) = attrgetter_atom_split(tokens)
return ('attrgetter', (name, args)) # depends on [control=['if'], data=['tokens']]
elif 'itemgetter' in tokens:
(op, args) = tokens
return ('itemgetter', (op, args)) # depends on [control=['if'], data=['tokens']]
else:
raise CoconutInternalException('invalid pipe item tokens', tokens) |
def on_release_key(key, callback, suppress=False):
    """
    Invokes `callback` for KEY_UP event related to the given key. For details see `hook`.
    """
    def release_handler(event):
        # Swallow KEY_DOWN events; only releases reach the callback.
        if event.event_type == KEY_DOWN:
            return True
        return callback(event)
    return hook_key(key, release_handler, suppress=suppress)
constant[
Invokes `callback` for KEY_UP event related to the given key. For details see `hook`.
]
return[call[name[hook_key], parameter[name[key], <ast.Lambda object at 0x7da1b1bf98a0>]]] | keyword[def] identifier[on_release_key] ( identifier[key] , identifier[callback] , identifier[suppress] = keyword[False] ):
literal[string]
keyword[return] identifier[hook_key] ( identifier[key] , keyword[lambda] identifier[e] : identifier[e] . identifier[event_type] == identifier[KEY_DOWN] keyword[or] identifier[callback] ( identifier[e] ), identifier[suppress] = identifier[suppress] ) | def on_release_key(key, callback, suppress=False):
"""
Invokes `callback` for KEY_UP event related to the given key. For details see `hook`.
"""
return hook_key(key, lambda e: e.event_type == KEY_DOWN or callback(e), suppress=suppress) |
def get_url_from (base_url, recursion_level, aggregate,
                  parent_url=None, base_ref=None, line=0, column=0, page=0,
                  name=u"", parent_content_type=None, extern=None):
    """
    Get url data from given base data.
    @param base_url: base url from a link tag
    @type base_url: string or None
    @param recursion_level: current recursion level
    @type recursion_level: number
    @param aggregate: aggregate object
    @type aggregate: aggregate.Consumer
    @param parent_url: parent url
    @type parent_url: string or None
    @param base_ref: base url from <base> tag
    @type base_ref string or None
    @param line: line number
    @type line: number
    @param column: column number
    @type column: number
    @param page: page number
    @type page: number
    @param name: link name
    @type name: string
    @param parent_content_type: MIME type of the parent document, if known
    @type parent_content_type: string or None
    @param extern: (is_extern, is_strict) or None
    @type extern: tuple(int, int) or None
    """
    if base_url is not None:
        base_url = strformat.unicode_safe(base_url)
        # left strip for detection of URL scheme
        base_url_stripped = base_url.lstrip()
    else:
        base_url_stripped = base_url
    if parent_url is not None:
        parent_url = strformat.unicode_safe(parent_url)
    if base_ref is not None:
        base_ref = strformat.unicode_safe(base_ref)
    name = strformat.unicode_safe(name)
    # NOTE(review): the whole resolved URL is lowercased here, apparently so
    # the scheme check below is case-insensitive; paths are lowercased too.
    url = absolute_url(base_url_stripped, base_ref, parent_url).lower()
    if ":" in url:
        scheme = url.split(":", 1)[0].lower()
    else:
        # no scheme separator found in the resolved URL
        scheme = None
    if not (url or name):
        # use filename as base url, with slash as path separator
        name = base_url.replace("\\", "/")
    allowed_schemes = aggregate.config["allowedschemes"]
    # ignore local PHP files with execution directives
    local_php = (parent_content_type == 'application/x-httpd-php' and
        '<?' in base_url and '?>' in base_url and scheme == 'file')
    if local_php or (allowed_schemes and scheme not in allowed_schemes):
        # Filtered scheme (or local PHP source): use the no-op URL class.
        klass = ignoreurl.IgnoreUrl
    else:
        # At recursion level 0 a scheme-less URL is assumed to be a local file.
        assume_local_file = (recursion_level == 0)
        klass = get_urlclass_from(scheme, assume_local_file=assume_local_file)
    log.debug(LOG_CHECK, "%s handles url %s", klass.__name__, base_url)
    return klass(base_url, recursion_level, aggregate,
                 parent_url=parent_url, base_ref=base_ref,
                 line=line, column=column, page=page, name=name, extern=extern)
constant[
Get url data from given base data.
@param base_url: base url from a link tag
@type base_url: string or None
@param recursion_level: current recursion level
@type recursion_level: number
@param aggregate: aggregate object
@type aggregate: aggregate.Consumer
@param parent_url: parent url
@type parent_url: string or None
@param base_ref: base url from <base> tag
@type base_ref string or None
@param line: line number
@type line: number
@param column: column number
@type column: number
@param page: page number
@type page: number
@param name: link name
@type name: string
@param extern: (is_extern, is_strict) or None
@type extern: tuple(int, int) or None
]
if compare[name[base_url] is_not constant[None]] begin[:]
variable[base_url] assign[=] call[name[strformat].unicode_safe, parameter[name[base_url]]]
variable[base_url_stripped] assign[=] call[name[base_url].lstrip, parameter[]]
if compare[name[parent_url] is_not constant[None]] begin[:]
variable[parent_url] assign[=] call[name[strformat].unicode_safe, parameter[name[parent_url]]]
if compare[name[base_ref] is_not constant[None]] begin[:]
variable[base_ref] assign[=] call[name[strformat].unicode_safe, parameter[name[base_ref]]]
variable[name] assign[=] call[name[strformat].unicode_safe, parameter[name[name]]]
variable[url] assign[=] call[call[name[absolute_url], parameter[name[base_url_stripped], name[base_ref], name[parent_url]]].lower, parameter[]]
if compare[constant[:] in name[url]] begin[:]
variable[scheme] assign[=] call[call[call[name[url].split, parameter[constant[:], constant[1]]]][constant[0]].lower, parameter[]]
variable[allowed_schemes] assign[=] call[name[aggregate].config][constant[allowedschemes]]
variable[local_php] assign[=] <ast.BoolOp object at 0x7da20e9623e0>
if <ast.BoolOp object at 0x7da18f721a50> begin[:]
variable[klass] assign[=] name[ignoreurl].IgnoreUrl
call[name[log].debug, parameter[name[LOG_CHECK], constant[%s handles url %s], name[klass].__name__, name[base_url]]]
return[call[name[klass], parameter[name[base_url], name[recursion_level], name[aggregate]]]] | keyword[def] identifier[get_url_from] ( identifier[base_url] , identifier[recursion_level] , identifier[aggregate] ,
identifier[parent_url] = keyword[None] , identifier[base_ref] = keyword[None] , identifier[line] = literal[int] , identifier[column] = literal[int] , identifier[page] = literal[int] ,
identifier[name] = literal[string] , identifier[parent_content_type] = keyword[None] , identifier[extern] = keyword[None] ):
literal[string]
keyword[if] identifier[base_url] keyword[is] keyword[not] keyword[None] :
identifier[base_url] = identifier[strformat] . identifier[unicode_safe] ( identifier[base_url] )
identifier[base_url_stripped] = identifier[base_url] . identifier[lstrip] ()
keyword[else] :
identifier[base_url_stripped] = identifier[base_url]
keyword[if] identifier[parent_url] keyword[is] keyword[not] keyword[None] :
identifier[parent_url] = identifier[strformat] . identifier[unicode_safe] ( identifier[parent_url] )
keyword[if] identifier[base_ref] keyword[is] keyword[not] keyword[None] :
identifier[base_ref] = identifier[strformat] . identifier[unicode_safe] ( identifier[base_ref] )
identifier[name] = identifier[strformat] . identifier[unicode_safe] ( identifier[name] )
identifier[url] = identifier[absolute_url] ( identifier[base_url_stripped] , identifier[base_ref] , identifier[parent_url] ). identifier[lower] ()
keyword[if] literal[string] keyword[in] identifier[url] :
identifier[scheme] = identifier[url] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[lower] ()
keyword[else] :
identifier[scheme] = keyword[None]
keyword[if] keyword[not] ( identifier[url] keyword[or] identifier[name] ):
identifier[name] = identifier[base_url] . identifier[replace] ( literal[string] , literal[string] )
identifier[allowed_schemes] = identifier[aggregate] . identifier[config] [ literal[string] ]
identifier[local_php] =( identifier[parent_content_type] == literal[string] keyword[and]
literal[string] keyword[in] identifier[base_url] keyword[and] literal[string] keyword[in] identifier[base_url] keyword[and] identifier[scheme] == literal[string] )
keyword[if] identifier[local_php] keyword[or] ( identifier[allowed_schemes] keyword[and] identifier[scheme] keyword[not] keyword[in] identifier[allowed_schemes] ):
identifier[klass] = identifier[ignoreurl] . identifier[IgnoreUrl]
keyword[else] :
identifier[assume_local_file] =( identifier[recursion_level] == literal[int] )
identifier[klass] = identifier[get_urlclass_from] ( identifier[scheme] , identifier[assume_local_file] = identifier[assume_local_file] )
identifier[log] . identifier[debug] ( identifier[LOG_CHECK] , literal[string] , identifier[klass] . identifier[__name__] , identifier[base_url] )
keyword[return] identifier[klass] ( identifier[base_url] , identifier[recursion_level] , identifier[aggregate] ,
identifier[parent_url] = identifier[parent_url] , identifier[base_ref] = identifier[base_ref] ,
identifier[line] = identifier[line] , identifier[column] = identifier[column] , identifier[page] = identifier[page] , identifier[name] = identifier[name] , identifier[extern] = identifier[extern] ) | def get_url_from(base_url, recursion_level, aggregate, parent_url=None, base_ref=None, line=0, column=0, page=0, name=u'', parent_content_type=None, extern=None):
"""
Get url data from given base data.
@param base_url: base url from a link tag
@type base_url: string or None
@param recursion_level: current recursion level
@type recursion_level: number
@param aggregate: aggregate object
@type aggregate: aggregate.Consumer
@param parent_url: parent url
@type parent_url: string or None
@param base_ref: base url from <base> tag
@type base_ref string or None
@param line: line number
@type line: number
@param column: column number
@type column: number
@param page: page number
@type page: number
@param name: link name
@type name: string
@param extern: (is_extern, is_strict) or None
@type extern: tuple(int, int) or None
"""
if base_url is not None:
base_url = strformat.unicode_safe(base_url)
# left strip for detection of URL scheme
base_url_stripped = base_url.lstrip() # depends on [control=['if'], data=['base_url']]
else:
base_url_stripped = base_url
if parent_url is not None:
parent_url = strformat.unicode_safe(parent_url) # depends on [control=['if'], data=['parent_url']]
if base_ref is not None:
base_ref = strformat.unicode_safe(base_ref) # depends on [control=['if'], data=['base_ref']]
name = strformat.unicode_safe(name)
url = absolute_url(base_url_stripped, base_ref, parent_url).lower()
if ':' in url:
scheme = url.split(':', 1)[0].lower() # depends on [control=['if'], data=['url']]
else:
scheme = None
if not (url or name):
# use filename as base url, with slash as path seperator
name = base_url.replace('\\', '/') # depends on [control=['if'], data=[]]
allowed_schemes = aggregate.config['allowedschemes']
# ignore local PHP files with execution directives
local_php = parent_content_type == 'application/x-httpd-php' and '<?' in base_url and ('?>' in base_url) and (scheme == 'file')
if local_php or (allowed_schemes and scheme not in allowed_schemes):
klass = ignoreurl.IgnoreUrl # depends on [control=['if'], data=[]]
else:
assume_local_file = recursion_level == 0
klass = get_urlclass_from(scheme, assume_local_file=assume_local_file)
log.debug(LOG_CHECK, '%s handles url %s', klass.__name__, base_url)
return klass(base_url, recursion_level, aggregate, parent_url=parent_url, base_ref=base_ref, line=line, column=column, page=page, name=name, extern=extern) |
def list_downloads():
    '''
    Return a list of all updates that have been downloaded locally.
    :return: A list of updates that have been downloaded
    :rtype: list
    CLI Example:
    .. code-block:: bash
        salt '*' softwareupdate.list_downloads
    '''
    # Collect every .dist descriptor below /Library/Updates in one pass.
    dist_files = []
    for dirpath, _dirnames, filenames in salt.utils.path.os_walk('/Library/Updates'):
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            if full_path.endswith('.dist'):
                dist_files.append(full_path)
    # An available update counts as downloaded when its base name (version
    # suffix stripped) appears inside one of the .dist descriptors.
    ret = []
    for update in _get_available():
        base_name = update.rsplit('-', 1)[0]
        for dist_file in dist_files:
            with salt.utils.files.fopen(dist_file) as fhr:
                contents = salt.utils.stringutils.to_unicode(fhr.read())
            if base_name in contents:
                ret.append(update)
    return ret
constant[
Return a list of all updates that have been downloaded locally.
:return: A list of updates that have been downloaded
:rtype: list
CLI Example:
.. code-block:: bash
salt '*' softwareupdate.list_downloads
]
variable[outfiles] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2045678e0>, <ast.Name object at 0x7da204564b50>, <ast.Name object at 0x7da204566020>]]] in starred[call[name[salt].utils.path.os_walk, parameter[constant[/Library/Updates]]]] begin[:]
for taget[name[f]] in starred[name[files]] begin[:]
call[name[outfiles].append, parameter[call[name[os].path.join, parameter[name[root], name[f]]]]]
variable[dist_files] assign[=] list[[]]
for taget[name[f]] in starred[name[outfiles]] begin[:]
if call[name[f].endswith, parameter[constant[.dist]]] begin[:]
call[name[dist_files].append, parameter[name[f]]]
variable[ret] assign[=] list[[]]
for taget[name[update]] in starred[call[name[_get_available], parameter[]]] begin[:]
for taget[name[f]] in starred[name[dist_files]] begin[:]
with call[name[salt].utils.files.fopen, parameter[name[f]]] begin[:]
if compare[call[call[name[update].rsplit, parameter[constant[-], constant[1]]]][constant[0]] in call[name[salt].utils.stringutils.to_unicode, parameter[call[name[fhr].read, parameter[]]]]] begin[:]
call[name[ret].append, parameter[name[update]]]
return[name[ret]] | keyword[def] identifier[list_downloads] ():
literal[string]
identifier[outfiles] =[]
keyword[for] identifier[root] , identifier[subFolder] , identifier[files] keyword[in] identifier[salt] . identifier[utils] . identifier[path] . identifier[os_walk] ( literal[string] ):
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[outfiles] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[f] ))
identifier[dist_files] =[]
keyword[for] identifier[f] keyword[in] identifier[outfiles] :
keyword[if] identifier[f] . identifier[endswith] ( literal[string] ):
identifier[dist_files] . identifier[append] ( identifier[f] )
identifier[ret] =[]
keyword[for] identifier[update] keyword[in] identifier[_get_available] ():
keyword[for] identifier[f] keyword[in] identifier[dist_files] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[f] ) keyword[as] identifier[fhr] :
keyword[if] identifier[update] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ] keyword[in] identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[fhr] . identifier[read] ()):
identifier[ret] . identifier[append] ( identifier[update] )
keyword[return] identifier[ret] | def list_downloads():
"""
Return a list of all updates that have been downloaded locally.
:return: A list of updates that have been downloaded
:rtype: list
CLI Example:
.. code-block:: bash
salt '*' softwareupdate.list_downloads
"""
outfiles = []
for (root, subFolder, files) in salt.utils.path.os_walk('/Library/Updates'):
for f in files:
outfiles.append(os.path.join(root, f)) # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=[]]
dist_files = []
for f in outfiles:
if f.endswith('.dist'):
dist_files.append(f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
ret = []
for update in _get_available():
for f in dist_files:
with salt.utils.files.fopen(f) as fhr:
if update.rsplit('-', 1)[0] in salt.utils.stringutils.to_unicode(fhr.read()):
ret.append(update) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fhr']] # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=['update']]
return ret |
def from_pygments_lexer_cls(cls, lexer_cls):
    """
    Build a :class:`.RegexSync` instance suited to the given Pygments lexer class.
    The sync pattern marks lines where highlighting may safely restart; unknown
    languages fall back to matching every line.
    """
    # TODO: Add definitions for other languages.
    language_patterns = {
        # For Python, start highlighting at any class/def block.
        'Python': r'^\s*(class|def)\s+',
        'Python 3': r'^\s*(class|def)\s+',
        # For HTML, start at any open/close tag definition.
        'HTML': r'<[/a-zA-Z]',
        # For javascript, start at a function.
        'JavaScript': r'\bfunction\b',
    }
    # By default, we start at every possible line.
    pattern = language_patterns.get(lexer_cls.name, '^')
    return cls(pattern)
constant[
Create a :class:`.RegexSync` instance for this Pygments lexer class.
]
variable[patterns] assign[=] dictionary[[<ast.Constant object at 0x7da204347eb0>, <ast.Constant object at 0x7da204344dc0>, <ast.Constant object at 0x7da204347af0>, <ast.Constant object at 0x7da2043442e0>], [<ast.Constant object at 0x7da204565b70>, <ast.Constant object at 0x7da204564220>, <ast.Constant object at 0x7da204566e90>, <ast.Constant object at 0x7da204565510>]]
variable[p] assign[=] call[name[patterns].get, parameter[name[lexer_cls].name, constant[^]]]
return[call[name[cls], parameter[name[p]]]] | keyword[def] identifier[from_pygments_lexer_cls] ( identifier[cls] , identifier[lexer_cls] ):
literal[string]
identifier[patterns] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[p] = identifier[patterns] . identifier[get] ( identifier[lexer_cls] . identifier[name] , literal[string] )
keyword[return] identifier[cls] ( identifier[p] ) | def from_pygments_lexer_cls(cls, lexer_cls):
"""
Create a :class:`.RegexSync` instance for this Pygments lexer class.
"""
# For Python, start highlighting at any class/def block.
# For HTML, start at any open/close tag definition.
# For javascript, start at a function.
# TODO: Add definitions for other languages.
# By default, we start at every possible line.
patterns = {'Python': '^\\s*(class|def)\\s+', 'Python 3': '^\\s*(class|def)\\s+', 'HTML': '<[/a-zA-Z]', 'JavaScript': '\\bfunction\\b'}
p = patterns.get(lexer_cls.name, '^')
return cls(p) |
def get_auth_params_from_request(request):
    """Extract the properties novaclient needs from the request object.
    The resulting tuple is used to memoize the calls to novaclient.
    """
    user = request.user
    token = user.token
    return (
        user.username,
        token.id,
        user.tenant_id,
        token.project.get('domain_id'),
        base.url_for(request, 'compute'),
        base.url_for(request, 'identity'),
    )
constant[Extracts properties needed by novaclient call from the request object.
These will be used to memoize the calls to novaclient.
]
return[tuple[[<ast.Attribute object at 0x7da1b1985780>, <ast.Attribute object at 0x7da1b1985510>, <ast.Attribute object at 0x7da1b1987f40>, <ast.Call object at 0x7da1b1986ce0>, <ast.Call object at 0x7da1b1985f90>, <ast.Call object at 0x7da1b1985e10>]]] | keyword[def] identifier[get_auth_params_from_request] ( identifier[request] ):
literal[string]
keyword[return] (
identifier[request] . identifier[user] . identifier[username] ,
identifier[request] . identifier[user] . identifier[token] . identifier[id] ,
identifier[request] . identifier[user] . identifier[tenant_id] ,
identifier[request] . identifier[user] . identifier[token] . identifier[project] . identifier[get] ( literal[string] ),
identifier[base] . identifier[url_for] ( identifier[request] , literal[string] ),
identifier[base] . identifier[url_for] ( identifier[request] , literal[string] )
) | def get_auth_params_from_request(request):
"""Extracts properties needed by novaclient call from the request object.
These will be used to memoize the calls to novaclient.
"""
return (request.user.username, request.user.token.id, request.user.tenant_id, request.user.token.project.get('domain_id'), base.url_for(request, 'compute'), base.url_for(request, 'identity')) |
def _set_bpdu_drop(self, v, load=False):
    """
    Setter method for bpdu_drop, mapped from YANG variable /interface/hundredgigabitethernet/bpdu_drop (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_bpdu_drop is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_bpdu_drop() directly.
    """
    # NOTE(review): pyangbind-generated setter — presumably v may arrive
    # pre-wrapped; _utype re-coerces it to the declared YANG type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated container class, attaching path helpers
        # and the REST/CLI extension metadata from the YANG model.
        t = YANGDynClass(v,base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'phy-stp-config', u'sort-priority': u'105', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError({
            'error-string': """bpdu_drop must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'phy-stp-config', u'sort-priority': u'105', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)""",
        })
    self.__bpdu_drop = t
    # Give subclasses a post-assignment hook if they define one.
    if hasattr(self, '_set'):
        self._set()
constant[
Setter method for bpdu_drop, mapped from YANG variable /interface/hundredgigabitethernet/bpdu_drop (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bpdu_drop is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bpdu_drop() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18bc70a30>
name[self].__bpdu_drop assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_bpdu_drop] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[bpdu_drop] . identifier[bpdu_drop] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__bpdu_drop] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_bpdu_drop(self, v, load=False):
"""
Setter method for bpdu_drop, mapped from YANG variable /interface/hundredgigabitethernet/bpdu_drop (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bpdu_drop is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bpdu_drop() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name='bpdu-drop', rest_name='bpdu-drop', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'phy-stp-config', u'sort-priority': u'105', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'bpdu_drop must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=bpdu_drop.bpdu_drop, is_container=\'container\', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Drop received BPDUs\', u\'callpoint\': u\'phy-stp-config\', u\'sort-priority\': u\'105\', u\'cli-compact-syntax\': None, u\'cli-sequence-commands\': None, u\'cli-incomplete-no\': None, u\'display-when\': u\'/vcsmode/vcs-mode = "true"\'}}, namespace=\'urn:brocade.com:mgmt:brocade-xstp\', defining_module=\'brocade-xstp\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__bpdu_drop = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def files(self):
    """
    Return the full path of every document in the corpus.

    Each label under ``self.root`` is a directory; every entry inside a
    label directory is treated as a document file.
    """
    paths = []
    for label in self.labels:
        label_dir = os.path.join(self.root, label)
        for filename in os.listdir(label_dir):
            paths.append(os.path.join(label_dir, filename))
    return paths
constant[
Returns the list of file names for all documents.
]
return[<ast.ListComp object at 0x7da18f00cfa0>] | keyword[def] identifier[files] ( identifier[self] ):
literal[string]
keyword[return] [
identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root] , identifier[label] , identifier[name] )
keyword[for] identifier[label] keyword[in] identifier[self] . identifier[labels]
keyword[for] identifier[name] keyword[in] identifier[os] . identifier[listdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root] , identifier[label] ))
] | def files(self):
"""
Returns the list of file names for all documents.
"""
return [os.path.join(self.root, label, name) for label in self.labels for name in os.listdir(os.path.join(self.root, label))] |
def cache(self, name, cache_class=Cache,
          identity_generator_class=IdentityGenerator,
          compressor_class=Compressor,
          serializer_class=Serializer, *args, **kwargs):
    """
    Build and return a cache object bound to this client.

    ``name`` identifies the cache series and is forwarded as ``app``.
    ``cache_class`` selects the implementation: ``Cache`` for normal use,
    ``HerdCache`` to mitigate the thundering-herd problem.
    ``identity_generator_class`` produces the real unique key stored in
    the cache; override it to meet special needs (must provide a
    ``generate`` API).
    ``compressor_class`` compresses cached values in redis (must provide
    ``compress`` and ``decompress``).
    ``serializer_class`` serializes content before compression (must
    provide ``serialize`` and ``deserialize``).
    Any additional positional/keyword arguments are passed straight
    through to ``cache_class``.
    """
    # Duplicate keywords supplied via **kwargs still raise TypeError,
    # exactly as with a direct keyword-argument call.
    return cache_class(self, *args,
                       app=name,
                       identity_generator_class=identity_generator_class,
                       compressor_class=compressor_class,
                       serializer_class=serializer_class,
                       **kwargs)
constant[
Return a cache object using default identity generator,
serializer and compressor.
``name`` is used to identify the series of your cache
``cache_class`` Cache is for normal use and HerdCache
is used in case of Thundering Herd Problem
``identity_generator_class`` is the class used to generate
the real unique key in cache, can be overwritten to
meet your special needs. It should provide `generate` API
``compressor_class`` is the class used to compress cache in redis,
can be overwritten with API `compress` and `decompress` retained.
``serializer_class`` is the class used to serialize
content before compress, can be overwritten with API
`serialize` and `deserialize` retained.
]
return[call[name[cache_class], parameter[name[self], <ast.Starred object at 0x7da1b078a3e0>]]] | keyword[def] identifier[cache] ( identifier[self] , identifier[name] , identifier[cache_class] = identifier[Cache] ,
identifier[identity_generator_class] = identifier[IdentityGenerator] ,
identifier[compressor_class] = identifier[Compressor] ,
identifier[serializer_class] = identifier[Serializer] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[cache_class] ( identifier[self] , identifier[app] = identifier[name] ,
identifier[identity_generator_class] = identifier[identity_generator_class] ,
identifier[compressor_class] = identifier[compressor_class] ,
identifier[serializer_class] = identifier[serializer_class] ,
* identifier[args] ,** identifier[kwargs] ) | def cache(self, name, cache_class=Cache, identity_generator_class=IdentityGenerator, compressor_class=Compressor, serializer_class=Serializer, *args, **kwargs):
"""
Return a cache object using default identity generator,
serializer and compressor.
``name`` is used to identify the series of your cache
``cache_class`` Cache is for normal use and HerdCache
is used in case of Thundering Herd Problem
``identity_generator_class`` is the class used to generate
the real unique key in cache, can be overwritten to
meet your special needs. It should provide `generate` API
``compressor_class`` is the class used to compress cache in redis,
can be overwritten with API `compress` and `decompress` retained.
``serializer_class`` is the class used to serialize
content before compress, can be overwritten with API
`serialize` and `deserialize` retained.
"""
return cache_class(self, *args, app=name, identity_generator_class=identity_generator_class, compressor_class=compressor_class, serializer_class=serializer_class, **kwargs) |
def _imagpart(self, f):
    """Return the imaginary-part counterpart of ``f``.

    For a real output dtype the imaginary part is identically zero, so
    the space's zero element is returned; otherwise a wrapper function
    evaluating ``f`` and taking ``.imag`` is lifted into the real space.
    """
    if is_real_dtype(self.out_dtype):
        # Real-valued functions have no imaginary part.
        return self.zero()

    def imag_of_f(x, **kwargs):
        # Evaluate f and coerce to the scalar output dtype before
        # extracting the imaginary component.
        values = np.asarray(f(x, **kwargs), dtype=self.scalar_out_dtype)
        return values.imag

    return self.real_space.element(imag_of_f)
constant[Function returning the imaginary part of the result from ``f``.]
def function[f_im, parameter[x]]:
variable[result] assign[=] call[name[np].asarray, parameter[call[name[f], parameter[name[x]]]]]
return[name[result].imag]
if call[name[is_real_dtype], parameter[name[self].out_dtype]] begin[:]
return[call[name[self].zero, parameter[]]] | keyword[def] identifier[_imagpart] ( identifier[self] , identifier[f] ):
literal[string]
keyword[def] identifier[f_im] ( identifier[x] ,** identifier[kwargs] ):
identifier[result] = identifier[np] . identifier[asarray] ( identifier[f] ( identifier[x] ,** identifier[kwargs] ),
identifier[dtype] = identifier[self] . identifier[scalar_out_dtype] )
keyword[return] identifier[result] . identifier[imag]
keyword[if] identifier[is_real_dtype] ( identifier[self] . identifier[out_dtype] ):
keyword[return] identifier[self] . identifier[zero] ()
keyword[else] :
keyword[return] identifier[self] . identifier[real_space] . identifier[element] ( identifier[f_im] ) | def _imagpart(self, f):
"""Function returning the imaginary part of the result from ``f``."""
def f_im(x, **kwargs):
result = np.asarray(f(x, **kwargs), dtype=self.scalar_out_dtype)
return result.imag
if is_real_dtype(self.out_dtype):
return self.zero() # depends on [control=['if'], data=[]]
else:
return self.real_space.element(f_im) |
def JTg(self):
    r'''Joule-Thomson coefficient of the chemical in the gas phase at
    its current temperature and pressure, in units of [K/Pa].

    .. math::
        \mu_{JT} = \left(\frac{\partial T}{\partial P}\right)_H = \frac{1}{C_p}
        \left[T \left(\frac{\partial V}{\partial T}\right)_P - V\right]
        = \frac{V}{C_p}\left(\beta T-1\right)

    Uses the gas-phase molar volume, molar heat capacity, and isobaric
    expansion coefficient already exposed on this object; when any of
    them is unavailable (falsy), None is returned instead.

    Examples
    --------
    >>> Chemical('dodecane', T=400, P=1000).JTg
    5.4089897835384913e-05
    '''
    Vmg = self.Vmg
    Cpgm = self.Cpgm
    beta_g = self.isobaric_expansion_g
    # All three properties are required; a falsy value means "missing".
    if not all((Vmg, Cpgm, beta_g)):
        return None
    return Joule_Thomson(T=self.T, V=Vmg, Cp=Cpgm, beta=beta_g)
return None | def function[JTg, parameter[self]]:
constant[Joule Thomson coefficient of the chemical in the gas phase at
its current temperature and pressure, in units of [K/Pa].
.. math::
\mu_{JT} = \left(\frac{\partial T}{\partial P}\right)_H = \frac{1}{C_p}
\left[T \left(\frac{\partial V}{\partial T}\right)_P - V\right]
= \frac{V}{C_p}\left(\beta T-1\right)
Utilizes the temperature-derivative method of
:obj:`thermo.volume.VolumeGas` and the temperature-dependent heat
capacity method :obj:`thermo.heat_capacity.HeatCapacityGas` to
obtain the properties required for the actual calculation.
Examples
--------
>>> Chemical('dodecane', T=400, P=1000).JTg
5.4089897835384913e-05
]
<ast.Tuple object at 0x7da2043447f0> assign[=] tuple[[<ast.Attribute object at 0x7da2043461a0>, <ast.Attribute object at 0x7da204345ab0>, <ast.Attribute object at 0x7da204344fd0>]]
if call[name[all], parameter[tuple[[<ast.Name object at 0x7da204346740>, <ast.Name object at 0x7da204346a10>, <ast.Name object at 0x7da2043445b0>]]]] begin[:]
return[call[name[Joule_Thomson], parameter[]]]
return[constant[None]] | keyword[def] identifier[JTg] ( identifier[self] ):
literal[string]
identifier[Vmg] , identifier[Cpgm] , identifier[isobaric_expansion_g] = identifier[self] . identifier[Vmg] , identifier[self] . identifier[Cpgm] , identifier[self] . identifier[isobaric_expansion_g]
keyword[if] identifier[all] (( identifier[Vmg] , identifier[Cpgm] , identifier[isobaric_expansion_g] )):
keyword[return] identifier[Joule_Thomson] ( identifier[T] = identifier[self] . identifier[T] , identifier[V] = identifier[Vmg] , identifier[Cp] = identifier[Cpgm] , identifier[beta] = identifier[isobaric_expansion_g] )
keyword[return] keyword[None] | def JTg(self):
"""Joule Thomson coefficient of the chemical in the gas phase at
its current temperature and pressure, in units of [K/Pa].
.. math::
\\mu_{JT} = \\left(\\frac{\\partial T}{\\partial P}\\right)_H = \\frac{1}{C_p}
\\left[T \\left(\\frac{\\partial V}{\\partial T}\\right)_P - V\\right]
= \\frac{V}{C_p}\\left(\\beta T-1\\right)
Utilizes the temperature-derivative method of
:obj:`thermo.volume.VolumeGas` and the temperature-dependent heat
capacity method :obj:`thermo.heat_capacity.HeatCapacityGas` to
obtain the properties required for the actual calculation.
Examples
--------
>>> Chemical('dodecane', T=400, P=1000).JTg
5.4089897835384913e-05
"""
(Vmg, Cpgm, isobaric_expansion_g) = (self.Vmg, self.Cpgm, self.isobaric_expansion_g)
if all((Vmg, Cpgm, isobaric_expansion_g)):
return Joule_Thomson(T=self.T, V=Vmg, Cp=Cpgm, beta=isobaric_expansion_g) # depends on [control=['if'], data=[]]
return None |
def create_package_file(root, master_package, subroot, py_files, opts, subs):
    """Build the text of the file and write the file.

    Generates the reST stub for the package ``subroot`` (under
    ``master_package``): a heading, an optional automodule directive, a
    toctree of subpackages, entries (or separate files) for submodules,
    and finally writes the result via ``write_file``.

    :param root: filesystem directory corresponding to ``subroot``
    :param master_package: dotted name of the enclosing package
        (presumably may be None for the top level -- verify against callers)
    :param subroot: package name relative to ``master_package``
    :param py_files: file names found directly inside ``root``
    :param opts: parsed options; ``modulefirst``, ``separatemodules`` and
        ``noheadings`` are read here
    :param subs: candidate subdirectory names, filtered below to real packages
    """
    text = format_heading(1, '%s' % makename(master_package, subroot))
    if opts.modulefirst:
        # Put the package's own automodule directive before the submodule
        # listing when requested.
        text += format_directive(subroot, master_package)
        text += '\n'
    # build a list of directories that are subpackages (contain an INITPY file)
    subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
    # if there are some package directories, add a TOC for these subpackages
    if subs:
        # text += format_heading(2, 'Subpackages')
        text += '.. toctree::\n\n'
        for sub in subs:
            text += '    %s.%s\n' % (makename(master_package, subroot), sub)
        text += '\n'
    # Module names (without .py) that are neither skipped nor the package init.
    submods = [path.splitext(sub)[0] for sub in py_files
               if not shall_skip(path.join(root, sub), opts) and
               sub != INITPY]
    if submods:
        #text += format_heading(2, 'Submodules')
        if opts.separatemodules:
            # One toctree entry plus a dedicated stub file per submodule.
            text += '.. toctree::\n\n'
            for submod in submods:
                modfile = makename(master_package, makename(subroot, submod))
                text += '   %s\n' % modfile
                # generate separate file for this module
                if not opts.noheadings:
                    filetext = format_heading(1, '%s module' % modfile)
                else:
                    filetext = ''
                filetext += format_directive(makename(subroot, submod),
                                             master_package)
                write_file(modfile, filetext, opts)
        else:
            # Inline each submodule's directive in the package file itself.
            for submod in submods:
                modfile = makename(master_package, makename(subroot, submod))
                if not opts.noheadings:
                    text += format_heading(2, '%s module' % modfile)
                text += format_directive(makename(subroot, submod),
                                         master_package)
                text += '\n'
        text += '\n'
    if not opts.modulefirst:
        # Package-level automodule directive goes last in this layout.
        text += format_heading(2, 'Module contents')
        text += format_directive(subroot, master_package)
    write_file(makename(master_package, subroot), text, opts)
constant[Build the text of the file and write the file.]
variable[text] assign[=] call[name[format_heading], parameter[constant[1], binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> call[name[makename], parameter[name[master_package], name[subroot]]]]]]
if name[opts].modulefirst begin[:]
<ast.AugAssign object at 0x7da1b1831930>
<ast.AugAssign object at 0x7da1b18313c0>
variable[subs] assign[=] <ast.ListComp object at 0x7da1b18326b0>
if name[subs] begin[:]
<ast.AugAssign object at 0x7da1b1831b70>
for taget[name[sub]] in starred[name[subs]] begin[:]
<ast.AugAssign object at 0x7da1b1831780>
<ast.AugAssign object at 0x7da1b1833040>
variable[submods] assign[=] <ast.ListComp object at 0x7da1b1831810>
if name[submods] begin[:]
if name[opts].separatemodules begin[:]
<ast.AugAssign object at 0x7da1b18333d0>
for taget[name[submod]] in starred[name[submods]] begin[:]
variable[modfile] assign[=] call[name[makename], parameter[name[master_package], call[name[makename], parameter[name[subroot], name[submod]]]]]
<ast.AugAssign object at 0x7da1b1a3ded0>
if <ast.UnaryOp object at 0x7da1b1a3f040> begin[:]
variable[filetext] assign[=] call[name[format_heading], parameter[constant[1], binary_operation[constant[%s module] <ast.Mod object at 0x7da2590d6920> name[modfile]]]]
<ast.AugAssign object at 0x7da1b1a3f340>
call[name[write_file], parameter[name[modfile], name[filetext], name[opts]]]
<ast.AugAssign object at 0x7da1b18666b0>
if <ast.UnaryOp object at 0x7da1b18643a0> begin[:]
<ast.AugAssign object at 0x7da1b1833880>
<ast.AugAssign object at 0x7da1b1830df0>
call[name[write_file], parameter[call[name[makename], parameter[name[master_package], name[subroot]]], name[text], name[opts]]] | keyword[def] identifier[create_package_file] ( identifier[root] , identifier[master_package] , identifier[subroot] , identifier[py_files] , identifier[opts] , identifier[subs] ):
literal[string]
identifier[text] = identifier[format_heading] ( literal[int] , literal[string] % identifier[makename] ( identifier[master_package] , identifier[subroot] ))
keyword[if] identifier[opts] . identifier[modulefirst] :
identifier[text] += identifier[format_directive] ( identifier[subroot] , identifier[master_package] )
identifier[text] += literal[string]
identifier[subs] =[ identifier[sub] keyword[for] identifier[sub] keyword[in] identifier[subs] keyword[if] identifier[path] . identifier[isfile] ( identifier[path] . identifier[join] ( identifier[root] , identifier[sub] , identifier[INITPY] ))]
keyword[if] identifier[subs] :
identifier[text] += literal[string]
keyword[for] identifier[sub] keyword[in] identifier[subs] :
identifier[text] += literal[string] %( identifier[makename] ( identifier[master_package] , identifier[subroot] ), identifier[sub] )
identifier[text] += literal[string]
identifier[submods] =[ identifier[path] . identifier[splitext] ( identifier[sub] )[ literal[int] ] keyword[for] identifier[sub] keyword[in] identifier[py_files]
keyword[if] keyword[not] identifier[shall_skip] ( identifier[path] . identifier[join] ( identifier[root] , identifier[sub] ), identifier[opts] ) keyword[and]
identifier[sub] != identifier[INITPY] ]
keyword[if] identifier[submods] :
keyword[if] identifier[opts] . identifier[separatemodules] :
identifier[text] += literal[string]
keyword[for] identifier[submod] keyword[in] identifier[submods] :
identifier[modfile] = identifier[makename] ( identifier[master_package] , identifier[makename] ( identifier[subroot] , identifier[submod] ))
identifier[text] += literal[string] % identifier[modfile]
keyword[if] keyword[not] identifier[opts] . identifier[noheadings] :
identifier[filetext] = identifier[format_heading] ( literal[int] , literal[string] % identifier[modfile] )
keyword[else] :
identifier[filetext] = literal[string]
identifier[filetext] += identifier[format_directive] ( identifier[makename] ( identifier[subroot] , identifier[submod] ),
identifier[master_package] )
identifier[write_file] ( identifier[modfile] , identifier[filetext] , identifier[opts] )
keyword[else] :
keyword[for] identifier[submod] keyword[in] identifier[submods] :
identifier[modfile] = identifier[makename] ( identifier[master_package] , identifier[makename] ( identifier[subroot] , identifier[submod] ))
keyword[if] keyword[not] identifier[opts] . identifier[noheadings] :
identifier[text] += identifier[format_heading] ( literal[int] , literal[string] % identifier[modfile] )
identifier[text] += identifier[format_directive] ( identifier[makename] ( identifier[subroot] , identifier[submod] ),
identifier[master_package] )
identifier[text] += literal[string]
identifier[text] += literal[string]
keyword[if] keyword[not] identifier[opts] . identifier[modulefirst] :
identifier[text] += identifier[format_heading] ( literal[int] , literal[string] )
identifier[text] += identifier[format_directive] ( identifier[subroot] , identifier[master_package] )
identifier[write_file] ( identifier[makename] ( identifier[master_package] , identifier[subroot] ), identifier[text] , identifier[opts] ) | def create_package_file(root, master_package, subroot, py_files, opts, subs):
"""Build the text of the file and write the file."""
text = format_heading(1, '%s' % makename(master_package, subroot))
if opts.modulefirst:
text += format_directive(subroot, master_package)
text += '\n' # depends on [control=['if'], data=[]]
# build a list of directories that are szvpackages (contain an INITPY file)
subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
# if there are some package directories, add a TOC for theses subpackages
if subs:
# text += format_heading(2, 'Subpackages')
text += '.. toctree::\n\n'
for sub in subs:
text += ' %s.%s\n' % (makename(master_package, subroot), sub) # depends on [control=['for'], data=['sub']]
text += '\n' # depends on [control=['if'], data=[]]
submods = [path.splitext(sub)[0] for sub in py_files if not shall_skip(path.join(root, sub), opts) and sub != INITPY]
if submods:
#text += format_heading(2, 'Submodules')
if opts.separatemodules:
text += '.. toctree::\n\n'
for submod in submods:
modfile = makename(master_package, makename(subroot, submod))
text += ' %s\n' % modfile
# generate separate file for this module
if not opts.noheadings:
filetext = format_heading(1, '%s module' % modfile) # depends on [control=['if'], data=[]]
else:
filetext = ''
filetext += format_directive(makename(subroot, submod), master_package)
write_file(modfile, filetext, opts) # depends on [control=['for'], data=['submod']] # depends on [control=['if'], data=[]]
else:
for submod in submods:
modfile = makename(master_package, makename(subroot, submod))
if not opts.noheadings:
text += format_heading(2, '%s module' % modfile) # depends on [control=['if'], data=[]]
text += format_directive(makename(subroot, submod), master_package)
text += '\n' # depends on [control=['for'], data=['submod']]
text += '\n' # depends on [control=['if'], data=[]]
if not opts.modulefirst:
text += format_heading(2, 'Module contents')
text += format_directive(subroot, master_package) # depends on [control=['if'], data=[]]
write_file(makename(master_package, subroot), text, opts) |
def calculate_ef_var(tpf, fpf):
    """
    Determine the variance contributions for an enrichment-factor (EF)
    estimate: the variance due to actives (``efvara``), the variance due
    to decoys (``efvard``), and ``s2``, the squared slope of the ROC
    curve tangent at the fpf where the enrichment factor was calculated.

    :param tpf: float, true-positive fraction at which EF was calculated
    :param fpf: float, false-positive fraction at which EF was calculated
        (assumed 0 < fpf <= 1; fpf == 0 would divide by zero -- callers
        are expected to guarantee a positive fpf)
    :return: tuple ``(efvara, efvard, s2)``; ``(0, 0, 0)`` when
        ``fpf == 1``, where the tangent slope is undefined
    """
    # At fpf == 1 the slope term is undefined (log(1) == 0 in the
    # denominator). Short-circuit before computing anything -- the
    # original computed efvara/efvard/ef here only to discard them.
    if fpf == 1:
        return (0, 0, 0)
    efvara = tpf * (1 - tpf)
    efvard = fpf * (1 - fpf)
    ef = tpf / fpf
    # Slope of the ROC tangent at (fpf, tpf); squared for the variance
    # propagation formula.
    s = ef * (1 + (np.log(ef) / np.log(fpf)))
    s2 = s * s
    return (efvara, efvard, s2)
constant[
determine variance due to actives (efvar_a) decoys (efvar_d) and s2, the slope of the ROC curve tangent to the
fpf @ which the enrichment factor was calculated
:param tpf: float tpf @ which the enrichment factor was calculated
:param fpf: float fpf @ which the enrichment factor was calculated
:return efvara, efvard, s2: tuple
]
variable[efvara] assign[=] binary_operation[name[tpf] * binary_operation[constant[1] - name[tpf]]]
variable[efvard] assign[=] binary_operation[name[fpf] * binary_operation[constant[1] - name[fpf]]]
variable[ef] assign[=] binary_operation[name[tpf] / name[fpf]]
if compare[name[fpf] equal[==] constant[1]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18c4cff70>, <ast.Constant object at 0x7da18c4cf310>, <ast.Constant object at 0x7da18c4ceb30>]]] | keyword[def] identifier[calculate_ef_var] ( identifier[tpf] , identifier[fpf] ):
literal[string]
identifier[efvara] =( identifier[tpf] *( literal[int] - identifier[tpf] ))
identifier[efvard] =( identifier[fpf] *( literal[int] - identifier[fpf] ))
identifier[ef] = identifier[tpf] / identifier[fpf]
keyword[if] identifier[fpf] == literal[int] :
keyword[return] ( literal[int] , literal[int] , literal[int] )
keyword[else] :
identifier[s] = identifier[ef] *( literal[int] +( identifier[np] . identifier[log] ( identifier[ef] )/ identifier[np] . identifier[log] ( identifier[fpf] )))
identifier[s2] = identifier[s] * identifier[s]
keyword[return] ( identifier[efvara] , identifier[efvard] , identifier[s2] ) | def calculate_ef_var(tpf, fpf):
"""
determine variance due to actives (efvar_a) decoys (efvar_d) and s2, the slope of the ROC curve tangent to the
fpf @ which the enrichment factor was calculated
:param tpf: float tpf @ which the enrichment factor was calculated
:param fpf: float fpf @ which the enrichment factor was calculated
:return efvara, efvard, s2: tuple
"""
efvara = tpf * (1 - tpf)
efvard = fpf * (1 - fpf)
ef = tpf / fpf
if fpf == 1:
return (0, 0, 0) # depends on [control=['if'], data=[]]
else:
s = ef * (1 + np.log(ef) / np.log(fpf))
s2 = s * s
return (efvara, efvard, s2) |
def response_body(self):
    '''
    Fetch the response body as a SAFEARRAY variant and convert it to str.

    Returns an empty string when the variant does not hold a SAFEARRAY
    of bytes.
    '''
    body_variant = VARIANT()
    _WinHttpRequest._ResponseBody(self, byref(body_variant))
    if not body_variant.is_safearray_of_bytes():
        return ''
    return body_variant.str_from_safearray()
constant[
Gets response body as a SAFEARRAY and converts the SAFEARRAY to str.
]
variable[var_respbody] assign[=] call[name[VARIANT], parameter[]]
call[name[_WinHttpRequest]._ResponseBody, parameter[name[self], call[name[byref], parameter[name[var_respbody]]]]]
if call[name[var_respbody].is_safearray_of_bytes, parameter[]] begin[:]
variable[respbody] assign[=] call[name[var_respbody].str_from_safearray, parameter[]]
return[name[respbody]] | keyword[def] identifier[response_body] ( identifier[self] ):
literal[string]
identifier[var_respbody] = identifier[VARIANT] ()
identifier[_WinHttpRequest] . identifier[_ResponseBody] ( identifier[self] , identifier[byref] ( identifier[var_respbody] ))
keyword[if] identifier[var_respbody] . identifier[is_safearray_of_bytes] ():
identifier[respbody] = identifier[var_respbody] . identifier[str_from_safearray] ()
keyword[return] identifier[respbody]
keyword[else] :
keyword[return] literal[string] | def response_body(self):
"""
Gets response body as a SAFEARRAY and converts the SAFEARRAY to str.
"""
var_respbody = VARIANT()
_WinHttpRequest._ResponseBody(self, byref(var_respbody))
if var_respbody.is_safearray_of_bytes():
respbody = var_respbody.str_from_safearray()
return respbody # depends on [control=['if'], data=[]]
else:
return '' |
def printTriples(self):
    """Print this entity's URI followed by each of its triples."""
    printDebug(Fore.RED + self.uri + Style.RESET_ALL)
    for triple in self.triples:
        # triple[1] is the predicate, triple[2] the object.
        predicate_line = Fore.BLACK + "=> " + unicode(triple[1])
        object_line = Style.DIM + ".... " + unicode(triple[2]) + Fore.RESET
        printDebug(predicate_line)
        printDebug(object_line)
    print("")
constant[ display triples ]
call[name[printDebug], parameter[binary_operation[binary_operation[name[Fore].RED + name[self].uri] + name[Style].RESET_ALL]]]
for taget[name[x]] in starred[name[self].triples] begin[:]
call[name[printDebug], parameter[binary_operation[binary_operation[name[Fore].BLACK + constant[=> ]] + call[name[unicode], parameter[call[name[x]][constant[1]]]]]]]
call[name[printDebug], parameter[binary_operation[binary_operation[binary_operation[name[Style].DIM + constant[.... ]] + call[name[unicode], parameter[call[name[x]][constant[2]]]]] + name[Fore].RESET]]]
call[name[print], parameter[constant[]]] | keyword[def] identifier[printTriples] ( identifier[self] ):
literal[string]
identifier[printDebug] ( identifier[Fore] . identifier[RED] + identifier[self] . identifier[uri] + identifier[Style] . identifier[RESET_ALL] )
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[triples] :
identifier[printDebug] ( identifier[Fore] . identifier[BLACK] + literal[string] + identifier[unicode] ( identifier[x] [ literal[int] ]))
identifier[printDebug] ( identifier[Style] . identifier[DIM] + literal[string] + identifier[unicode] ( identifier[x] [ literal[int] ])+ identifier[Fore] . identifier[RESET] )
identifier[print] ( literal[string] ) | def printTriples(self):
""" display triples """
printDebug(Fore.RED + self.uri + Style.RESET_ALL)
for x in self.triples:
printDebug(Fore.BLACK + '=> ' + unicode(x[1]))
printDebug(Style.DIM + '.... ' + unicode(x[2]) + Fore.RESET) # depends on [control=['for'], data=['x']]
print('') |
def _get_recursive_iterator(self, recursive=False):
"""
Basic recursive iterator whose only purpose is to yield all items
and sections in order, with their full paths as keys.
Main challenge is to de-duplicate items and sections which
have aliases.
Do not add any new features to this iterator, instead
build others that extend this one.
"""
names_yielded = set()
for obj_alias, obj in self._tree.items():
if obj.is_section:
if obj.alias in names_yielded:
continue
names_yielded.add(obj.alias)
yield (obj.alias,), obj
if not recursive:
continue
for sub_item_path, sub_item in obj._get_recursive_iterator(recursive=recursive):
yield (obj_alias,) + sub_item_path, sub_item
else:
# _tree contains duplicates so that we can have multiple aliases point
# to the same item. We have to de-duplicate here.
if obj.name in names_yielded:
continue
names_yielded.add(obj.name)
yield (obj.name,), obj | def function[_get_recursive_iterator, parameter[self, recursive]]:
constant[
Basic recursive iterator whose only purpose is to yield all items
and sections in order, with their full paths as keys.
Main challenge is to de-duplicate items and sections which
have aliases.
Do not add any new features to this iterator, instead
build others that extend this one.
]
variable[names_yielded] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20e957b20>, <ast.Name object at 0x7da20e957b50>]]] in starred[call[name[self]._tree.items, parameter[]]] begin[:]
if name[obj].is_section begin[:]
if compare[name[obj].alias in name[names_yielded]] begin[:]
continue
call[name[names_yielded].add, parameter[name[obj].alias]]
<ast.Yield object at 0x7da20e9550c0>
if <ast.UnaryOp object at 0x7da20e9556f0> begin[:]
continue
for taget[tuple[[<ast.Name object at 0x7da20e9542e0>, <ast.Name object at 0x7da20e955450>]]] in starred[call[name[obj]._get_recursive_iterator, parameter[]]] begin[:]
<ast.Yield object at 0x7da20e957a30> | keyword[def] identifier[_get_recursive_iterator] ( identifier[self] , identifier[recursive] = keyword[False] ):
literal[string]
identifier[names_yielded] = identifier[set] ()
keyword[for] identifier[obj_alias] , identifier[obj] keyword[in] identifier[self] . identifier[_tree] . identifier[items] ():
keyword[if] identifier[obj] . identifier[is_section] :
keyword[if] identifier[obj] . identifier[alias] keyword[in] identifier[names_yielded] :
keyword[continue]
identifier[names_yielded] . identifier[add] ( identifier[obj] . identifier[alias] )
keyword[yield] ( identifier[obj] . identifier[alias] ,), identifier[obj]
keyword[if] keyword[not] identifier[recursive] :
keyword[continue]
keyword[for] identifier[sub_item_path] , identifier[sub_item] keyword[in] identifier[obj] . identifier[_get_recursive_iterator] ( identifier[recursive] = identifier[recursive] ):
keyword[yield] ( identifier[obj_alias] ,)+ identifier[sub_item_path] , identifier[sub_item]
keyword[else] :
keyword[if] identifier[obj] . identifier[name] keyword[in] identifier[names_yielded] :
keyword[continue]
identifier[names_yielded] . identifier[add] ( identifier[obj] . identifier[name] )
keyword[yield] ( identifier[obj] . identifier[name] ,), identifier[obj] | def _get_recursive_iterator(self, recursive=False):
"""
Basic recursive iterator whose only purpose is to yield all items
and sections in order, with their full paths as keys.
Main challenge is to de-duplicate items and sections which
have aliases.
Do not add any new features to this iterator, instead
build others that extend this one.
"""
names_yielded = set()
for (obj_alias, obj) in self._tree.items():
if obj.is_section:
if obj.alias in names_yielded:
continue # depends on [control=['if'], data=[]]
names_yielded.add(obj.alias)
yield ((obj.alias,), obj)
if not recursive:
continue # depends on [control=['if'], data=[]]
for (sub_item_path, sub_item) in obj._get_recursive_iterator(recursive=recursive):
yield ((obj_alias,) + sub_item_path, sub_item) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
# _tree contains duplicates so that we can have multiple aliases point
# to the same item. We have to de-duplicate here.
if obj.name in names_yielded:
continue # depends on [control=['if'], data=[]]
names_yielded.add(obj.name)
yield ((obj.name,), obj) # depends on [control=['for'], data=[]] |
def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
    """
    Append a partition to a table, passing along an environment context.

    Thin synchronous Thrift client wrapper: sends the RPC request and
    blocks on the matching receive, returning the server's reply.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - environment_context
    """
    self.send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
    return self.recv_append_partition_with_environment_context()
constant[
Parameters:
- db_name
- tbl_name
- part_vals
- environment_context
]
call[name[self].send_append_partition_with_environment_context, parameter[name[db_name], name[tbl_name], name[part_vals], name[environment_context]]]
return[call[name[self].recv_append_partition_with_environment_context, parameter[]]] | keyword[def] identifier[append_partition_with_environment_context] ( identifier[self] , identifier[db_name] , identifier[tbl_name] , identifier[part_vals] , identifier[environment_context] ):
literal[string]
identifier[self] . identifier[send_append_partition_with_environment_context] ( identifier[db_name] , identifier[tbl_name] , identifier[part_vals] , identifier[environment_context] )
keyword[return] identifier[self] . identifier[recv_append_partition_with_environment_context] () | def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- environment_context
"""
self.send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
return self.recv_append_partition_with_environment_context() |
def list(cls, invoice_id, custom_headers=None):
    """
    Retrieve the raw byte content for the given invoice of the current user.

    :type user_id: int
    :type invoice_id: int
    :type custom_headers: dict[str, str]|None
    :rtype: BunqResponseBytes
    """
    if custom_headers is None:
        custom_headers = {}

    api_client = client.ApiClient(cls._get_api_context())
    endpoint_url = cls._ENDPOINT_URL_LISTING.format(
        cls._determine_user_id(), invoice_id)
    response_raw = api_client.get(endpoint_url, {}, custom_headers)
    bunq_response = client.BunqResponse(
        response_raw.body_bytes, response_raw.headers)
    return BunqResponseBytes.cast_from_bunq_response(bunq_response)
constant[
:type user_id: int
:type invoice_id: int
:type custom_headers: dict[str, str]|None
:rtype: BunqResponseBytes
]
if compare[name[custom_headers] is constant[None]] begin[:]
variable[custom_headers] assign[=] dictionary[[], []]
variable[api_client] assign[=] call[name[client].ApiClient, parameter[call[name[cls]._get_api_context, parameter[]]]]
variable[endpoint_url] assign[=] call[name[cls]._ENDPOINT_URL_LISTING.format, parameter[call[name[cls]._determine_user_id, parameter[]], name[invoice_id]]]
variable[response_raw] assign[=] call[name[api_client].get, parameter[name[endpoint_url], dictionary[[], []], name[custom_headers]]]
return[call[name[BunqResponseBytes].cast_from_bunq_response, parameter[call[name[client].BunqResponse, parameter[name[response_raw].body_bytes, name[response_raw].headers]]]]] | keyword[def] identifier[list] ( identifier[cls] , identifier[invoice_id] , identifier[custom_headers] = keyword[None] ):
literal[string]
keyword[if] identifier[custom_headers] keyword[is] keyword[None] :
identifier[custom_headers] ={}
identifier[api_client] = identifier[client] . identifier[ApiClient] ( identifier[cls] . identifier[_get_api_context] ())
identifier[endpoint_url] = identifier[cls] . identifier[_ENDPOINT_URL_LISTING] . identifier[format] (
identifier[cls] . identifier[_determine_user_id] (), identifier[invoice_id] )
identifier[response_raw] = identifier[api_client] . identifier[get] ( identifier[endpoint_url] ,{}, identifier[custom_headers] )
keyword[return] identifier[BunqResponseBytes] . identifier[cast_from_bunq_response] (
identifier[client] . identifier[BunqResponse] ( identifier[response_raw] . identifier[body_bytes] , identifier[response_raw] . identifier[headers] )
) | def list(cls, invoice_id, custom_headers=None):
"""
:type user_id: int
:type invoice_id: int
:type custom_headers: dict[str, str]|None
:rtype: BunqResponseBytes
"""
if custom_headers is None:
custom_headers = {} # depends on [control=['if'], data=['custom_headers']]
api_client = client.ApiClient(cls._get_api_context())
endpoint_url = cls._ENDPOINT_URL_LISTING.format(cls._determine_user_id(), invoice_id)
response_raw = api_client.get(endpoint_url, {}, custom_headers)
return BunqResponseBytes.cast_from_bunq_response(client.BunqResponse(response_raw.body_bytes, response_raw.headers)) |
def pythons():
    '''Install latest pythons with pyenv.

    The python version will be activated in the projects base dir.
    Will skip already installed latest python versions.

    :return: ``1`` when pyenv is missing, otherwise ``None`` on success.
    '''
    # Bail out early with an install hint when pyenv itself is missing;
    # returning 1 signals failure to the task runner.
    if not _pyenv_exists():
        print('\npyenv is not installed. You can install it with fabsetup '
              '(https://github.com/theno/fabsetup):\n\n ' +
              cyan('mkdir ~/repos && cd ~/repos\n '
                   'git clone https://github.com/theno/fabsetup.git\n '
                   'cd fabsetup && fab setup.pyenv -H localhost'))
        return 1
    latest_pythons = _determine_latest_pythons()
    print(cyan('\n## install latest python versions'))
    # NOTE(review): flo() appears to interpolate *local* variable names into
    # its template (f-string style) -- renaming these locals would change the
    # generated shell commands; confirm against flo's documentation.
    for version in latest_pythons:
        # --skip-existing makes the install idempotent across repeated runs.
        local(flo('pyenv install --skip-existing {version}'))
    print(cyan('\n## activate pythons'))
    basedir = dirname(__file__)
    latest_pythons_str = ' '.join(latest_pythons)
    # `pyenv local system <versions>` pins the system interpreter plus every
    # freshly installed version for this project directory (.python-version).
    local(flo('cd {basedir} && pyenv local system {latest_pythons_str}'))
    # The last list entry is treated as the newest version; only that one
    # receives the tooling for testing and for building/uploading releases.
    highest_python = latest_pythons[-1]
    print(cyan(flo(
        '\n## prepare Python-{highest_python} for testing and packaging')))
    packages_for_testing = 'pytest tox'
    packages_for_packaging = 'pypandoc twine'
    local(flo('~/.pyenv/versions/{highest_python}/bin/pip install --upgrade '
              'pip {packages_for_testing} {packages_for_packaging}'))
constant[Install latest pythons with pyenv.
The python version will be activated in the projects base dir.
Will skip already installed latest python versions.
]
if <ast.UnaryOp object at 0x7da18bccb4f0> begin[:]
call[name[print], parameter[binary_operation[constant[
pyenv is not installed. You can install it with fabsetup (https://github.com/theno/fabsetup):
] + call[name[cyan], parameter[constant[mkdir ~/repos && cd ~/repos
git clone https://github.com/theno/fabsetup.git
cd fabsetup && fab setup.pyenv -H localhost]]]]]]
return[constant[1]]
variable[latest_pythons] assign[=] call[name[_determine_latest_pythons], parameter[]]
call[name[print], parameter[call[name[cyan], parameter[constant[
## install latest python versions]]]]]
for taget[name[version]] in starred[name[latest_pythons]] begin[:]
call[name[local], parameter[call[name[flo], parameter[constant[pyenv install --skip-existing {version}]]]]]
call[name[print], parameter[call[name[cyan], parameter[constant[
## activate pythons]]]]]
variable[basedir] assign[=] call[name[dirname], parameter[name[__file__]]]
variable[latest_pythons_str] assign[=] call[constant[ ].join, parameter[name[latest_pythons]]]
call[name[local], parameter[call[name[flo], parameter[constant[cd {basedir} && pyenv local system {latest_pythons_str}]]]]]
variable[highest_python] assign[=] call[name[latest_pythons]][<ast.UnaryOp object at 0x7da18bcca800>]
call[name[print], parameter[call[name[cyan], parameter[call[name[flo], parameter[constant[
## prepare Python-{highest_python} for testing and packaging]]]]]]]
variable[packages_for_testing] assign[=] constant[pytest tox]
variable[packages_for_packaging] assign[=] constant[pypandoc twine]
call[name[local], parameter[call[name[flo], parameter[constant[~/.pyenv/versions/{highest_python}/bin/pip install --upgrade pip {packages_for_testing} {packages_for_packaging}]]]]] | keyword[def] identifier[pythons] ():
literal[string]
keyword[if] keyword[not] identifier[_pyenv_exists] ():
identifier[print] ( literal[string]
literal[string] +
identifier[cyan] ( literal[string]
literal[string]
literal[string] ))
keyword[return] literal[int]
identifier[latest_pythons] = identifier[_determine_latest_pythons] ()
identifier[print] ( identifier[cyan] ( literal[string] ))
keyword[for] identifier[version] keyword[in] identifier[latest_pythons] :
identifier[local] ( identifier[flo] ( literal[string] ))
identifier[print] ( identifier[cyan] ( literal[string] ))
identifier[basedir] = identifier[dirname] ( identifier[__file__] )
identifier[latest_pythons_str] = literal[string] . identifier[join] ( identifier[latest_pythons] )
identifier[local] ( identifier[flo] ( literal[string] ))
identifier[highest_python] = identifier[latest_pythons] [- literal[int] ]
identifier[print] ( identifier[cyan] ( identifier[flo] (
literal[string] )))
identifier[packages_for_testing] = literal[string]
identifier[packages_for_packaging] = literal[string]
identifier[local] ( identifier[flo] ( literal[string]
literal[string] )) | def pythons():
"""Install latest pythons with pyenv.
The python version will be activated in the projects base dir.
Will skip already installed latest python versions.
"""
if not _pyenv_exists():
print('\npyenv is not installed. You can install it with fabsetup (https://github.com/theno/fabsetup):\n\n ' + cyan('mkdir ~/repos && cd ~/repos\n git clone https://github.com/theno/fabsetup.git\n cd fabsetup && fab setup.pyenv -H localhost'))
return 1 # depends on [control=['if'], data=[]]
latest_pythons = _determine_latest_pythons()
print(cyan('\n## install latest python versions'))
for version in latest_pythons:
local(flo('pyenv install --skip-existing {version}')) # depends on [control=['for'], data=[]]
print(cyan('\n## activate pythons'))
basedir = dirname(__file__)
latest_pythons_str = ' '.join(latest_pythons)
local(flo('cd {basedir} && pyenv local system {latest_pythons_str}'))
highest_python = latest_pythons[-1]
print(cyan(flo('\n## prepare Python-{highest_python} for testing and packaging')))
packages_for_testing = 'pytest tox'
packages_for_packaging = 'pypandoc twine'
local(flo('~/.pyenv/versions/{highest_python}/bin/pip install --upgrade pip {packages_for_testing} {packages_for_packaging}')) |
async def send_heartbeat(self, name):
    """Report a service as alive.

    Issues a CMD_HEARTBEAT command for the named service and waits up to
    five seconds for the HeartbeatResponse acknowledgement.

    Args:
        name (string): The name of the service to send a heartbeat for
    """

    payload = {'name': name}
    await self.send_command(OPERATIONS.CMD_HEARTBEAT, payload,
                            MESSAGES.HeartbeatResponse, timeout=5.0)
literal[string]
keyword[await] identifier[self] . identifier[send_command] ( identifier[OPERATIONS] . identifier[CMD_HEARTBEAT] ,{ literal[string] : identifier[name] },
identifier[MESSAGES] . identifier[HeartbeatResponse] , identifier[timeout] = literal[int] ) | async def send_heartbeat(self, name):
"""Send a heartbeat for a service.
Args:
name (string): The name of the service to send a heartbeat for
"""
await self.send_command(OPERATIONS.CMD_HEARTBEAT, {'name': name}, MESSAGES.HeartbeatResponse, timeout=5.0) |
def apply(self, matrix):
    """
    Slice the supplied matrix to this window's region and apply any
    transform bound to the window.

    :param matrix: an indexable object (e.g. a numpy array) that supports
        being subscripted with the value returned by ``self.indices()``
    :return: ``self.transform(view)`` when a transform is bound,
        otherwise the raw sliced view
    """
    view = matrix[self.indices()]
    # `is not None` (PEP 8) instead of `!= None`: an identity check cannot
    # be hijacked by a custom __ne__ on the transform object.
    return self.transform(view) if self.transform is not None else view
constant[
Slices the supplied matrix and applies any transform bound to this window
]
variable[view] assign[=] call[name[matrix]][call[name[self].indices, parameter[]]]
return[<ast.IfExp object at 0x7da1b12f30d0>] | keyword[def] identifier[apply] ( identifier[self] , identifier[matrix] ):
literal[string]
identifier[view] = identifier[matrix] [ identifier[self] . identifier[indices] ()]
keyword[return] identifier[self] . identifier[transform] ( identifier[view] ) keyword[if] identifier[self] . identifier[transform] != keyword[None] keyword[else] identifier[view] | def apply(self, matrix):
"""
Slices the supplied matrix and applies any transform bound to this window
"""
view = matrix[self.indices()]
return self.transform(view) if self.transform != None else view |
def visit_complex_value(self, node):
    """Convert :class:`ComplexValue` to one of ExactMatch, PartialMatch and Regex Value nodes.

    The kind of the node is identified by the leading token of ``node.value``;
    the token is stripped off before the concrete value node is built.

    :param node: the ComplexValue AST node to convert.
    :return: an ``ExactMatchValue``, ``PartialMatchValue`` or ``RegexValue``
        node, or a plain ``ast.Value`` fallback for unrecognized tokens.
    """
    if node.value.startswith(ComplexValue.EXACT_VALUE_TOKEN):
        value = node.value.strip(ComplexValue.EXACT_VALUE_TOKEN)
        return ExactMatchValue(value)
    elif node.value.startswith(ComplexValue.PARTIAL_VALUE_TOKEN):
        value = node.value.strip(ComplexValue.PARTIAL_VALUE_TOKEN)
        # The membership test is already a bool; no conditional needed.
        return PartialMatchValue(value, ast.GenericValue.WILDCARD_TOKEN in value)
    elif node.value.startswith(ComplexValue.REGEX_VALUE_TOKEN):
        return RegexValue(node.value.strip(ComplexValue.REGEX_VALUE_TOKEN))
    else:
        # Covering the case where ComplexValue supports more than ExactMatch,
        # PartialMatch and Regex values.
        msg = self.__class__.__name__ + ': Unrecognized complex value'
        try:
            msg += ' lookahead token: "' + node.value[0] + '"'
        except IndexError:
            msg += ': \"' + repr(node.value) + '"'
        msg += '.\nUsing simple value instead: "' + node.value + '".'
        # Logger.warn is a deprecated alias of Logger.warning.
        logger.warning(msg)
        return ast.Value(node.value)
constant[Convert :class:`ComplexValue` to one of ExactMatch, PartialMatch and Regex Value nodes.]
if call[name[node].value.startswith, parameter[name[ComplexValue].EXACT_VALUE_TOKEN]] begin[:]
variable[value] assign[=] call[name[node].value.strip, parameter[name[ComplexValue].EXACT_VALUE_TOKEN]]
return[call[name[ExactMatchValue], parameter[name[value]]]] | keyword[def] identifier[visit_complex_value] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[node] . identifier[value] . identifier[startswith] ( identifier[ComplexValue] . identifier[EXACT_VALUE_TOKEN] ):
identifier[value] = identifier[node] . identifier[value] . identifier[strip] ( identifier[ComplexValue] . identifier[EXACT_VALUE_TOKEN] )
keyword[return] identifier[ExactMatchValue] ( identifier[value] )
keyword[elif] identifier[node] . identifier[value] . identifier[startswith] ( identifier[ComplexValue] . identifier[PARTIAL_VALUE_TOKEN] ):
identifier[value] = identifier[node] . identifier[value] . identifier[strip] ( identifier[ComplexValue] . identifier[PARTIAL_VALUE_TOKEN] )
keyword[return] identifier[PartialMatchValue] ( identifier[value] , keyword[True] keyword[if] identifier[ast] . identifier[GenericValue] . identifier[WILDCARD_TOKEN] keyword[in] identifier[value] keyword[else] keyword[False] )
keyword[elif] identifier[node] . identifier[value] . identifier[startswith] ( identifier[ComplexValue] . identifier[REGEX_VALUE_TOKEN] ):
keyword[return] identifier[RegexValue] ( identifier[node] . identifier[value] . identifier[strip] ( identifier[ComplexValue] . identifier[REGEX_VALUE_TOKEN] ))
keyword[else] :
identifier[msg] = identifier[self] . identifier[__class__] . identifier[__name__] + literal[string]
keyword[try] :
identifier[msg] += literal[string] + identifier[node] . identifier[value] [ literal[int] ]+ literal[string]
keyword[except] identifier[IndexError] :
identifier[msg] += literal[string] + identifier[repr] ( identifier[node] . identifier[value] )+ literal[string]
identifier[msg] += literal[string] + identifier[node] . identifier[value] + literal[string]
identifier[logger] . identifier[warn] ( identifier[msg] )
keyword[return] identifier[ast] . identifier[Value] ( identifier[node] . identifier[value] ) | def visit_complex_value(self, node):
"""Convert :class:`ComplexValue` to one of ExactMatch, PartialMatch and Regex Value nodes."""
if node.value.startswith(ComplexValue.EXACT_VALUE_TOKEN):
value = node.value.strip(ComplexValue.EXACT_VALUE_TOKEN)
return ExactMatchValue(value) # depends on [control=['if'], data=[]]
elif node.value.startswith(ComplexValue.PARTIAL_VALUE_TOKEN):
value = node.value.strip(ComplexValue.PARTIAL_VALUE_TOKEN)
return PartialMatchValue(value, True if ast.GenericValue.WILDCARD_TOKEN in value else False) # depends on [control=['if'], data=[]]
elif node.value.startswith(ComplexValue.REGEX_VALUE_TOKEN):
return RegexValue(node.value.strip(ComplexValue.REGEX_VALUE_TOKEN)) # depends on [control=['if'], data=[]]
else:
# Covering the case where ComplexValue supports more than ExactMatch, PartialMatch and Regex values.
msg = self.__class__.__name__ + ': Unrecognized complex value'
try:
msg += ' lookahead token: "' + node.value[0] + '"' # depends on [control=['try'], data=[]]
except IndexError:
msg += ': "' + repr(node.value) + '"' # depends on [control=['except'], data=[]]
msg += '.\nUsing simple value instead: "' + node.value + '".'
logger.warn(msg)
return ast.Value(node.value) |
def edit(self, name=None, description=None, start_date=None, due_date=None, assignees=None, status=None):
    """Edit the details of an activity.

    Only arguments that are provided are validated and sent to the server;
    omitted fields are left untouched.

    :param name: (optionally) edit the name of the activity
    :type name: basestring or None
    :param description: (optionally) edit the description of the activity
    :type description: basestring or None
    :param start_date: (optionally) edit the start date of the activity as a datetime object (UTC time/timezone
                       aware preferred)
    :type start_date: datetime or None
    :param due_date: (optionally) edit the due_date of the activity as a datetime object (UTC time/timezone
                     aware preferred)
    :type due_date: datetime or None
    :param assignees: (optionally) edit the assignees of the activity as a list, will overwrite all assignees
    :type assignees: list(basestring) or None
    :param status: (optionally) edit the status of the activity as a string based
                   on :class:`~pykechain.enums.ActivityType`
    :type status: basestring or None

    :raises NotFoundError: if a `username` in the list of assignees is not in the list of scope members
    :raises IllegalArgumentError: if the type of the inputs is not correct
    :raises APIError: if another Error occurs
    :warns: UserWarning - When a naive datetime is provided. Defaults to UTC.

    Example
    -------
    >>> from datetime import datetime
    >>> my_task = project.activity('Specify the wheel diameter')
    >>> my_task.edit(name='Specify wheel diameter and circumference',
    ...              description='The diameter and circumference are specified in inches',
    ...              start_date=datetime.utcnow(),  # naive time is interpreted as UTC time
    ...              assignee='testuser')

    If we want to provide timezone aware datetime objects we can use the 3rd party convenience library :mod:`pytz`.
    Mind that we need to fetch the timezone first and use `<timezone>.localize(<your datetime>)` to make it
    work correctly.

    Using `datetime(2017,6,1,23,59,0 tzinfo=<tz>)` does NOT work for most timezones with a
    daylight saving time. Check the `pytz <http://pythonhosted.org/pytz/#localized-times-and-date-arithmetic>`_
    documentation.

    To make it work using :mod:`pytz` and timezone aware :mod:`datetime` see the following example::

    >>> import pytz
    >>> start_date_tzaware = datetime.now(pytz.utc)
    >>> mytimezone = pytz.timezone('Europe/Amsterdam')
    >>> due_date_tzaware = mytimezone.localize(datetime(2019, 10, 27, 23, 59, 0))
    >>> my_task.edit(due_date=due_date_tzaware, start_date=start_date_tzaware)
    """
    # Build the PUT payload from the supplied fields only; the activity id
    # is always required by the endpoint.
    # NOTE(review): all checks below use truthiness, so falsy values such as
    # '' or [] are silently skipped rather than cleared -- confirm intended.
    update_dict = {'id': self.id}
    if name:
        if isinstance(name, (str, text_type)):
            update_dict.update({'name': name})
            # Keep the local attribute in sync with the server-side change.
            self.name = name
        else:
            raise IllegalArgumentError('Name should be a string')
    if description:
        if isinstance(description, (str, text_type)):
            update_dict.update({'description': description})
            self.description = description
        else:
            raise IllegalArgumentError('Description should be a string')
    if start_date:
        if isinstance(start_date, datetime.datetime):
            # Naive datetimes are accepted but warned about; the API will
            # interpret the ISO string as UTC.
            if not start_date.tzinfo:
                warnings.warn("The startdate '{}' is naive and not timezone aware, use pytz.timezone info. "
                              "This date is interpreted as UTC time.".format(start_date.isoformat(sep=' ')))
            update_dict.update({'start_date': start_date.isoformat(sep='T')})
        else:
            raise IllegalArgumentError('Start date should be a datetime.datetime() object')
    if due_date:
        if isinstance(due_date, datetime.datetime):
            if not due_date.tzinfo:
                warnings.warn("The duedate '{}' is naive and not timezone aware, use pytz.timezone info. "
                              "This date is interpreted as UTC time.".format(due_date.isoformat(sep=' ')))
            update_dict.update({'due_date': due_date.isoformat(sep='T')})
        else:
            raise IllegalArgumentError('Due date should be a datetime.datetime() object')
    if assignees:
        if isinstance(assignees, list):
            # Every assignee must be a member of the scope; this costs one
            # extra API round-trip to fetch the member list for validation.
            project = self._client.scope(pk=self.scope_id, status=None)
            members_list = [member['username'] for member in project._json_data['members']]
            for assignee in assignees:
                if assignee not in members_list:
                    raise NotFoundError("Assignee '{}' should be a member of the scope".format(assignee))
            update_dict.update({'assignees': assignees})
        else:
            raise IllegalArgumentError('Assignees should be a list')
    if status:
        if isinstance(status, (str, text_type)) and status in ActivityStatus.values():
            update_dict.update({'status': status})
        else:
            raise IllegalArgumentError('Status should be a string')
    url = self._client._build_url('activity', activity_id=self.id)
    r = self._client._request('PUT', url, json=update_dict)
    if r.status_code != requests.codes.ok:  # pragma: no cover
        raise APIError("Could not update Activity ({})".format(r))
    # Mirror the accepted changes into the cached JSON representation so the
    # in-memory object reflects the server state without a re-fetch.
    if status:
        self._json_data['status'] = str(status)
    if assignees:
        self._json_data['assignees'] = assignees
    if due_date:
        self._json_data['due_date'] = str(due_date)
    if start_date:
        self._json_data['start_date'] = str(start_date)
constant[Edit the details of an activity.
:param name: (optionally) edit the name of the activity
:type name: basestring or None
:param description: (optionally) edit the description of the activity
:type description: basestring or None
:param start_date: (optionally) edit the start date of the activity as a datetime object (UTC time/timezone
aware preferred)
:type start_date: datetime or None
:param due_date: (optionally) edit the due_date of the activity as a datetime object (UTC time/timzeone
aware preferred)
:type due_date: datetime or None
:param assignees: (optionally) edit the assignees of the activity as a list, will overwrite all assignees
:type assignees: list(basestring) or None
:param status: (optionally) edit the status of the activity as a string based
on :class:`~pykechain.enums.ActivityType`
:type status: basestring or None
:raises NotFoundError: if a `username` in the list of assignees is not in the list of scope members
:raises IllegalArgumentError: if the type of the inputs is not correct
:raises APIError: if another Error occurs
:warns: UserWarning - When a naive datetime is provided. Defaults to UTC.
Example
-------
>>> from datetime import datetime
>>> my_task = project.activity('Specify the wheel diameter')
>>> my_task.edit(name='Specify wheel diameter and circumference',
... description='The diameter and circumference are specified in inches',
... start_date=datetime.utcnow(), # naive time is interpreted as UTC time
... assignee='testuser')
If we want to provide timezone aware datetime objects we can use the 3rd party convenience library :mod:`pytz`.
Mind that we need to fetch the timezone first and use `<timezone>.localize(<your datetime>)` to make it
work correctly.
Using `datetime(2017,6,1,23,59,0 tzinfo=<tz>)` does NOT work for most timezones with a
daylight saving time. Check the `pytz <http://pythonhosted.org/pytz/#localized-times-and-date-arithmetic>`_
documentation.
To make it work using :mod:`pytz` and timezone aware :mod:`datetime` see the following example::
>>> import pytz
>>> start_date_tzaware = datetime.now(pytz.utc)
>>> mytimezone = pytz.timezone('Europe/Amsterdam')
>>> due_date_tzaware = mytimezone.localize(datetime(2019, 10, 27, 23, 59, 0))
>>> my_task.edit(due_date=due_date_tzaware, start_date=start_date_tzaware)
]
variable[update_dict] assign[=] dictionary[[<ast.Constant object at 0x7da18dc06ec0>], [<ast.Attribute object at 0x7da18dc06560>]]
if name[name] begin[:]
if call[name[isinstance], parameter[name[name], tuple[[<ast.Name object at 0x7da18dc06e90>, <ast.Name object at 0x7da18dc06d40>]]]] begin[:]
call[name[update_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc04cd0>], [<ast.Name object at 0x7da18dc06440>]]]]
name[self].name assign[=] name[name]
if name[description] begin[:]
if call[name[isinstance], parameter[name[description], tuple[[<ast.Name object at 0x7da18dc04670>, <ast.Name object at 0x7da18dc05de0>]]]] begin[:]
call[name[update_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc07dc0>], [<ast.Name object at 0x7da18dc053c0>]]]]
name[self].description assign[=] name[description]
if name[start_date] begin[:]
if call[name[isinstance], parameter[name[start_date], name[datetime].datetime]] begin[:]
if <ast.UnaryOp object at 0x7da18dc076a0> begin[:]
call[name[warnings].warn, parameter[call[constant[The startdate '{}' is naive and not timezone aware, use pytz.timezone info. This date is interpreted as UTC time.].format, parameter[call[name[start_date].isoformat, parameter[]]]]]]
call[name[update_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc050c0>], [<ast.Call object at 0x7da18dc06a40>]]]]
if name[due_date] begin[:]
if call[name[isinstance], parameter[name[due_date], name[datetime].datetime]] begin[:]
if <ast.UnaryOp object at 0x7da18dc069e0> begin[:]
call[name[warnings].warn, parameter[call[constant[The duedate '{}' is naive and not timezone aware, use pytz.timezone info. This date is interpreted as UTC time.].format, parameter[call[name[due_date].isoformat, parameter[]]]]]]
call[name[update_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc06b30>], [<ast.Call object at 0x7da18dc05960>]]]]
if name[assignees] begin[:]
if call[name[isinstance], parameter[name[assignees], name[list]]] begin[:]
variable[project] assign[=] call[name[self]._client.scope, parameter[]]
variable[members_list] assign[=] <ast.ListComp object at 0x7da1b25d1e70>
for taget[name[assignee]] in starred[name[assignees]] begin[:]
if compare[name[assignee] <ast.NotIn object at 0x7da2590d7190> name[members_list]] begin[:]
<ast.Raise object at 0x7da1b25d1390>
call[name[update_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18f812170>], [<ast.Name object at 0x7da18f810d60>]]]]
if name[status] begin[:]
if <ast.BoolOp object at 0x7da18f811330> begin[:]
call[name[update_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da1b246a890>], [<ast.Name object at 0x7da1b246ab00>]]]]
variable[url] assign[=] call[name[self]._client._build_url, parameter[constant[activity]]]
variable[r] assign[=] call[name[self]._client._request, parameter[constant[PUT], name[url]]]
if compare[name[r].status_code not_equal[!=] name[requests].codes.ok] begin[:]
<ast.Raise object at 0x7da1b25d2380>
if name[status] begin[:]
call[name[self]._json_data][constant[status]] assign[=] call[name[str], parameter[name[status]]]
if name[assignees] begin[:]
call[name[self]._json_data][constant[assignees]] assign[=] name[assignees]
if name[due_date] begin[:]
call[name[self]._json_data][constant[due_date]] assign[=] call[name[str], parameter[name[due_date]]]
if name[start_date] begin[:]
call[name[self]._json_data][constant[start_date]] assign[=] call[name[str], parameter[name[start_date]]] | keyword[def] identifier[edit] ( identifier[self] , identifier[name] = keyword[None] , identifier[description] = keyword[None] , identifier[start_date] = keyword[None] , identifier[due_date] = keyword[None] , identifier[assignees] = keyword[None] , identifier[status] = keyword[None] ):
literal[string]
identifier[update_dict] ={ literal[string] : identifier[self] . identifier[id] }
keyword[if] identifier[name] :
keyword[if] identifier[isinstance] ( identifier[name] ,( identifier[str] , identifier[text_type] )):
identifier[update_dict] . identifier[update] ({ literal[string] : identifier[name] })
identifier[self] . identifier[name] = identifier[name]
keyword[else] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] )
keyword[if] identifier[description] :
keyword[if] identifier[isinstance] ( identifier[description] ,( identifier[str] , identifier[text_type] )):
identifier[update_dict] . identifier[update] ({ literal[string] : identifier[description] })
identifier[self] . identifier[description] = identifier[description]
keyword[else] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] )
keyword[if] identifier[start_date] :
keyword[if] identifier[isinstance] ( identifier[start_date] , identifier[datetime] . identifier[datetime] ):
keyword[if] keyword[not] identifier[start_date] . identifier[tzinfo] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] . identifier[format] ( identifier[start_date] . identifier[isoformat] ( identifier[sep] = literal[string] )))
identifier[update_dict] . identifier[update] ({ literal[string] : identifier[start_date] . identifier[isoformat] ( identifier[sep] = literal[string] )})
keyword[else] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] )
keyword[if] identifier[due_date] :
keyword[if] identifier[isinstance] ( identifier[due_date] , identifier[datetime] . identifier[datetime] ):
keyword[if] keyword[not] identifier[due_date] . identifier[tzinfo] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] . identifier[format] ( identifier[due_date] . identifier[isoformat] ( identifier[sep] = literal[string] )))
identifier[update_dict] . identifier[update] ({ literal[string] : identifier[due_date] . identifier[isoformat] ( identifier[sep] = literal[string] )})
keyword[else] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] )
keyword[if] identifier[assignees] :
keyword[if] identifier[isinstance] ( identifier[assignees] , identifier[list] ):
identifier[project] = identifier[self] . identifier[_client] . identifier[scope] ( identifier[pk] = identifier[self] . identifier[scope_id] , identifier[status] = keyword[None] )
identifier[members_list] =[ identifier[member] [ literal[string] ] keyword[for] identifier[member] keyword[in] identifier[project] . identifier[_json_data] [ literal[string] ]]
keyword[for] identifier[assignee] keyword[in] identifier[assignees] :
keyword[if] identifier[assignee] keyword[not] keyword[in] identifier[members_list] :
keyword[raise] identifier[NotFoundError] ( literal[string] . identifier[format] ( identifier[assignee] ))
identifier[update_dict] . identifier[update] ({ literal[string] : identifier[assignees] })
keyword[else] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] )
keyword[if] identifier[status] :
keyword[if] identifier[isinstance] ( identifier[status] ,( identifier[str] , identifier[text_type] )) keyword[and] identifier[status] keyword[in] identifier[ActivityStatus] . identifier[values] ():
identifier[update_dict] . identifier[update] ({ literal[string] : identifier[status] })
keyword[else] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] )
identifier[url] = identifier[self] . identifier[_client] . identifier[_build_url] ( literal[string] , identifier[activity_id] = identifier[self] . identifier[id] )
identifier[r] = identifier[self] . identifier[_client] . identifier[_request] ( literal[string] , identifier[url] , identifier[json] = identifier[update_dict] )
keyword[if] identifier[r] . identifier[status_code] != identifier[requests] . identifier[codes] . identifier[ok] :
keyword[raise] identifier[APIError] ( literal[string] . identifier[format] ( identifier[r] ))
keyword[if] identifier[status] :
identifier[self] . identifier[_json_data] [ literal[string] ]= identifier[str] ( identifier[status] )
keyword[if] identifier[assignees] :
identifier[self] . identifier[_json_data] [ literal[string] ]= identifier[assignees]
keyword[if] identifier[due_date] :
identifier[self] . identifier[_json_data] [ literal[string] ]= identifier[str] ( identifier[due_date] )
keyword[if] identifier[start_date] :
identifier[self] . identifier[_json_data] [ literal[string] ]= identifier[str] ( identifier[start_date] ) | def edit(self, name=None, description=None, start_date=None, due_date=None, assignees=None, status=None):
"""Edit the details of an activity.
:param name: (optionally) edit the name of the activity
:type name: basestring or None
:param description: (optionally) edit the description of the activity
:type description: basestring or None
:param start_date: (optionally) edit the start date of the activity as a datetime object (UTC time/timezone
aware preferred)
:type start_date: datetime or None
:param due_date: (optionally) edit the due_date of the activity as a datetime object (UTC time/timzeone
aware preferred)
:type due_date: datetime or None
:param assignees: (optionally) edit the assignees of the activity as a list, will overwrite all assignees
:type assignees: list(basestring) or None
:param status: (optionally) edit the status of the activity as a string based
on :class:`~pykechain.enums.ActivityType`
:type status: basestring or None
:raises NotFoundError: if a `username` in the list of assignees is not in the list of scope members
:raises IllegalArgumentError: if the type of the inputs is not correct
:raises APIError: if another Error occurs
:warns: UserWarning - When a naive datetime is provided. Defaults to UTC.
Example
-------
>>> from datetime import datetime
>>> my_task = project.activity('Specify the wheel diameter')
>>> my_task.edit(name='Specify wheel diameter and circumference',
... description='The diameter and circumference are specified in inches',
... start_date=datetime.utcnow(), # naive time is interpreted as UTC time
... assignee='testuser')
If we want to provide timezone aware datetime objects we can use the 3rd party convenience library :mod:`pytz`.
Mind that we need to fetch the timezone first and use `<timezone>.localize(<your datetime>)` to make it
work correctly.
Using `datetime(2017,6,1,23,59,0 tzinfo=<tz>)` does NOT work for most timezones with a
daylight saving time. Check the `pytz <http://pythonhosted.org/pytz/#localized-times-and-date-arithmetic>`_
documentation.
To make it work using :mod:`pytz` and timezone aware :mod:`datetime` see the following example::
>>> import pytz
>>> start_date_tzaware = datetime.now(pytz.utc)
>>> mytimezone = pytz.timezone('Europe/Amsterdam')
>>> due_date_tzaware = mytimezone.localize(datetime(2019, 10, 27, 23, 59, 0))
>>> my_task.edit(due_date=due_date_tzaware, start_date=start_date_tzaware)
"""
update_dict = {'id': self.id}
if name:
if isinstance(name, (str, text_type)):
update_dict.update({'name': name})
self.name = name # depends on [control=['if'], data=[]]
else:
raise IllegalArgumentError('Name should be a string') # depends on [control=['if'], data=[]]
if description:
if isinstance(description, (str, text_type)):
update_dict.update({'description': description})
self.description = description # depends on [control=['if'], data=[]]
else:
raise IllegalArgumentError('Description should be a string') # depends on [control=['if'], data=[]]
if start_date:
if isinstance(start_date, datetime.datetime):
if not start_date.tzinfo:
warnings.warn("The startdate '{}' is naive and not timezone aware, use pytz.timezone info. This date is interpreted as UTC time.".format(start_date.isoformat(sep=' '))) # depends on [control=['if'], data=[]]
update_dict.update({'start_date': start_date.isoformat(sep='T')}) # depends on [control=['if'], data=[]]
else:
raise IllegalArgumentError('Start date should be a datetime.datetime() object') # depends on [control=['if'], data=[]]
if due_date:
if isinstance(due_date, datetime.datetime):
if not due_date.tzinfo:
warnings.warn("The duedate '{}' is naive and not timezone aware, use pytz.timezone info. This date is interpreted as UTC time.".format(due_date.isoformat(sep=' '))) # depends on [control=['if'], data=[]]
update_dict.update({'due_date': due_date.isoformat(sep='T')}) # depends on [control=['if'], data=[]]
else:
raise IllegalArgumentError('Due date should be a datetime.datetime() object') # depends on [control=['if'], data=[]]
if assignees:
if isinstance(assignees, list):
project = self._client.scope(pk=self.scope_id, status=None)
members_list = [member['username'] for member in project._json_data['members']]
for assignee in assignees:
if assignee not in members_list:
raise NotFoundError("Assignee '{}' should be a member of the scope".format(assignee)) # depends on [control=['if'], data=['assignee']] # depends on [control=['for'], data=['assignee']]
update_dict.update({'assignees': assignees}) # depends on [control=['if'], data=[]]
else:
raise IllegalArgumentError('Assignees should be a list') # depends on [control=['if'], data=[]]
if status:
if isinstance(status, (str, text_type)) and status in ActivityStatus.values():
update_dict.update({'status': status}) # depends on [control=['if'], data=[]]
else:
raise IllegalArgumentError('Status should be a string') # depends on [control=['if'], data=[]]
url = self._client._build_url('activity', activity_id=self.id)
r = self._client._request('PUT', url, json=update_dict)
if r.status_code != requests.codes.ok: # pragma: no cover
raise APIError('Could not update Activity ({})'.format(r)) # depends on [control=['if'], data=[]]
if status:
self._json_data['status'] = str(status) # depends on [control=['if'], data=[]]
if assignees:
self._json_data['assignees'] = assignees # depends on [control=['if'], data=[]]
if due_date:
self._json_data['due_date'] = str(due_date) # depends on [control=['if'], data=[]]
if start_date:
self._json_data['start_date'] = str(start_date) # depends on [control=['if'], data=[]] |
def _send(self, packet):
"""Add packet to send queue."""
fut = self.loop.create_future()
self.waiters.append((fut, packet))
if self.waiters and self.in_transaction is False:
self.protocol.send_packet()
return fut | def function[_send, parameter[self, packet]]:
constant[Add packet to send queue.]
variable[fut] assign[=] call[name[self].loop.create_future, parameter[]]
call[name[self].waiters.append, parameter[tuple[[<ast.Name object at 0x7da1b27824a0>, <ast.Name object at 0x7da1b2780190>]]]]
if <ast.BoolOp object at 0x7da1b2782710> begin[:]
call[name[self].protocol.send_packet, parameter[]]
return[name[fut]] | keyword[def] identifier[_send] ( identifier[self] , identifier[packet] ):
literal[string]
identifier[fut] = identifier[self] . identifier[loop] . identifier[create_future] ()
identifier[self] . identifier[waiters] . identifier[append] (( identifier[fut] , identifier[packet] ))
keyword[if] identifier[self] . identifier[waiters] keyword[and] identifier[self] . identifier[in_transaction] keyword[is] keyword[False] :
identifier[self] . identifier[protocol] . identifier[send_packet] ()
keyword[return] identifier[fut] | def _send(self, packet):
"""Add packet to send queue."""
fut = self.loop.create_future()
self.waiters.append((fut, packet))
if self.waiters and self.in_transaction is False:
self.protocol.send_packet() # depends on [control=['if'], data=[]]
return fut |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.