code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _get_dict_char_count(txt):
"""
reads the characters in txt and returns a dictionary
of all letters
"""
dct = {}
for letter in txt:
if letter in dct:
dct[letter] += 1
else:
dct[letter] = 1
return dct | def function[_get_dict_char_count, parameter[txt]]:
constant[
reads the characters in txt and returns a dictionary
of all letters
]
variable[dct] assign[=] dictionary[[], []]
for taget[name[letter]] in starred[name[txt]] begin[:]
if compare[name[letter] in name[dct]] begin[:]
<ast.AugAssign object at 0x7da18fe930a0>
return[name[dct]] | keyword[def] identifier[_get_dict_char_count] ( identifier[txt] ):
literal[string]
identifier[dct] ={}
keyword[for] identifier[letter] keyword[in] identifier[txt] :
keyword[if] identifier[letter] keyword[in] identifier[dct] :
identifier[dct] [ identifier[letter] ]+= literal[int]
keyword[else] :
identifier[dct] [ identifier[letter] ]= literal[int]
keyword[return] identifier[dct] | def _get_dict_char_count(txt):
"""
reads the characters in txt and returns a dictionary
of all letters
"""
dct = {}
for letter in txt:
if letter in dct:
dct[letter] += 1 # depends on [control=['if'], data=['letter', 'dct']]
else:
dct[letter] = 1 # depends on [control=['for'], data=['letter']]
return dct |
def parseBEDString(line, scoreType=int, dropAfter=None):
"""
Parse a string in BED format and return a GenomicInterval object.
:param line: the string to be parsed
:param dropAfter: an int indicating that any fields after and including
this field should be ignored as they don't conform to
the BED format. By default, None, meaning we use all
fields. Index from zero.
:return: GenomicInterval object built from the BED string representation
"""
peices = line.split("\t")
if dropAfter is not None:
peices = peices[0:dropAfter]
if len(peices) < 3:
raise GenomicIntervalError("BED elements must have at least chrom, " +
"start and end; found only " +
str(len(peices)) + " in " + line)
chrom = peices[0]
start = peices[1]
end = peices[2]
name = None
score = None
strand = None
if len(peices) >= 4 is not None:
name = peices[3]
if len(peices) >= 5 is not None:
score = peices[4]
if len(peices) >= 6 is not None:
strand = peices[5]
return GenomicInterval(chrom, start, end, name, score, strand, scoreType) | def function[parseBEDString, parameter[line, scoreType, dropAfter]]:
constant[
Parse a string in BED format and return a GenomicInterval object.
:param line: the string to be parsed
:param dropAfter: an int indicating that any fields after and including
this field should be ignored as they don't conform to
the BED format. By default, None, meaning we use all
fields. Index from zero.
:return: GenomicInterval object built from the BED string representation
]
variable[peices] assign[=] call[name[line].split, parameter[constant[ ]]]
if compare[name[dropAfter] is_not constant[None]] begin[:]
variable[peices] assign[=] call[name[peices]][<ast.Slice object at 0x7da18f58ff70>]
if compare[call[name[len], parameter[name[peices]]] less[<] constant[3]] begin[:]
<ast.Raise object at 0x7da18f58d3f0>
variable[chrom] assign[=] call[name[peices]][constant[0]]
variable[start] assign[=] call[name[peices]][constant[1]]
variable[end] assign[=] call[name[peices]][constant[2]]
variable[name] assign[=] constant[None]
variable[score] assign[=] constant[None]
variable[strand] assign[=] constant[None]
if compare[call[name[len], parameter[name[peices]]] greater_or_equal[>=] constant[4]] begin[:]
variable[name] assign[=] call[name[peices]][constant[3]]
if compare[call[name[len], parameter[name[peices]]] greater_or_equal[>=] constant[5]] begin[:]
variable[score] assign[=] call[name[peices]][constant[4]]
if compare[call[name[len], parameter[name[peices]]] greater_or_equal[>=] constant[6]] begin[:]
variable[strand] assign[=] call[name[peices]][constant[5]]
return[call[name[GenomicInterval], parameter[name[chrom], name[start], name[end], name[name], name[score], name[strand], name[scoreType]]]] | keyword[def] identifier[parseBEDString] ( identifier[line] , identifier[scoreType] = identifier[int] , identifier[dropAfter] = keyword[None] ):
literal[string]
identifier[peices] = identifier[line] . identifier[split] ( literal[string] )
keyword[if] identifier[dropAfter] keyword[is] keyword[not] keyword[None] :
identifier[peices] = identifier[peices] [ literal[int] : identifier[dropAfter] ]
keyword[if] identifier[len] ( identifier[peices] )< literal[int] :
keyword[raise] identifier[GenomicIntervalError] ( literal[string] +
literal[string] +
identifier[str] ( identifier[len] ( identifier[peices] ))+ literal[string] + identifier[line] )
identifier[chrom] = identifier[peices] [ literal[int] ]
identifier[start] = identifier[peices] [ literal[int] ]
identifier[end] = identifier[peices] [ literal[int] ]
identifier[name] = keyword[None]
identifier[score] = keyword[None]
identifier[strand] = keyword[None]
keyword[if] identifier[len] ( identifier[peices] )>= literal[int] keyword[is] keyword[not] keyword[None] :
identifier[name] = identifier[peices] [ literal[int] ]
keyword[if] identifier[len] ( identifier[peices] )>= literal[int] keyword[is] keyword[not] keyword[None] :
identifier[score] = identifier[peices] [ literal[int] ]
keyword[if] identifier[len] ( identifier[peices] )>= literal[int] keyword[is] keyword[not] keyword[None] :
identifier[strand] = identifier[peices] [ literal[int] ]
keyword[return] identifier[GenomicInterval] ( identifier[chrom] , identifier[start] , identifier[end] , identifier[name] , identifier[score] , identifier[strand] , identifier[scoreType] ) | def parseBEDString(line, scoreType=int, dropAfter=None):
"""
Parse a string in BED format and return a GenomicInterval object.
:param line: the string to be parsed
:param dropAfter: an int indicating that any fields after and including
this field should be ignored as they don't conform to
the BED format. By default, None, meaning we use all
fields. Index from zero.
:return: GenomicInterval object built from the BED string representation
"""
peices = line.split('\t')
if dropAfter is not None:
peices = peices[0:dropAfter] # depends on [control=['if'], data=['dropAfter']]
if len(peices) < 3:
raise GenomicIntervalError('BED elements must have at least chrom, ' + 'start and end; found only ' + str(len(peices)) + ' in ' + line) # depends on [control=['if'], data=[]]
chrom = peices[0]
start = peices[1]
end = peices[2]
name = None
score = None
strand = None
if len(peices) >= 4 is not None:
name = peices[3] # depends on [control=['if'], data=[]]
if len(peices) >= 5 is not None:
score = peices[4] # depends on [control=['if'], data=[]]
if len(peices) >= 6 is not None:
strand = peices[5] # depends on [control=['if'], data=[]]
return GenomicInterval(chrom, start, end, name, score, strand, scoreType) |
def create_user(self, user):
"""
Create a user on the local system
"""
self.run("useradd -m %s" % user)
usr = getpwnam(user)
return usr | def function[create_user, parameter[self, user]]:
constant[
Create a user on the local system
]
call[name[self].run, parameter[binary_operation[constant[useradd -m %s] <ast.Mod object at 0x7da2590d6920> name[user]]]]
variable[usr] assign[=] call[name[getpwnam], parameter[name[user]]]
return[name[usr]] | keyword[def] identifier[create_user] ( identifier[self] , identifier[user] ):
literal[string]
identifier[self] . identifier[run] ( literal[string] % identifier[user] )
identifier[usr] = identifier[getpwnam] ( identifier[user] )
keyword[return] identifier[usr] | def create_user(self, user):
"""
Create a user on the local system
"""
self.run('useradd -m %s' % user)
usr = getpwnam(user)
return usr |
def _CallFlowLegacy(self,
flow_name=None,
next_state=None,
request_data=None,
client_id=None,
base_session_id=None,
**kwargs):
"""Creates a new flow and send its responses to a state.
This creates a new flow. The flow may send back many responses which will be
queued by the framework until the flow terminates. The final status message
will cause the entire transaction to be committed to the specified state.
Args:
flow_name: The name of the flow to invoke.
next_state: The state in this flow, that responses to this message should
go to.
request_data: Any dict provided here will be available in the
RequestState protobuf. The Responses object maintains a reference to
this protobuf for use in the execution of the state method. (so you can
access this data by responses.request). There is no format mandated on
this data but it may be a serialized protobuf.
client_id: If given, the flow is started for this client.
base_session_id: A URN which will be used to build a URN.
**kwargs: Arguments for the child flow.
Returns:
The URN of the child flow which was created.
Raises:
RuntimeError: In case of no cpu quota left to start more clients.
"""
client_id = client_id or self.runner_args.client_id
# We prepare a request state, and add it to our queue - any
# responses from the child flow will return to the request state
# and the stated next_state. Note however, that there is no
# client_id or actual request message here because we directly
# invoke the child flow rather than queue anything for it.
state = rdf_flow_runner.RequestState(
id=self.GetNextOutboundId(),
session_id=utils.SmartUnicode(self.session_id),
client_id=client_id,
next_state=next_state,
response_count=0)
if request_data:
state.data = rdf_protodict.Dict().FromDict(request_data)
# Pass our logs collection urn to the flow object.
logs_urn = self.hunt_obj.logs_collection_urn
# If we were called with write_intermediate_results, propagate down to
# child flows. This allows write_intermediate_results to be set to True
# either at the top level parent, or somewhere in the middle of
# the call chain.
write_intermediate = kwargs.pop("write_intermediate_results", False)
# Create the new child flow but do not notify the user about it.
child_urn = self.hunt_obj.StartAFF4Flow(
base_session_id=base_session_id or self.session_id,
client_id=client_id,
cpu_limit=self._GetSubFlowCPULimit(),
flow_name=flow_name,
logs_collection_urn=logs_urn,
network_bytes_limit=self._GetSubFlowNetworkLimit(),
notify_to_user=False,
parent_flow=self.hunt_obj,
queue=self.runner_args.queue,
request_state=state,
sync=False,
token=self.token,
write_intermediate_results=write_intermediate,
**kwargs)
self.QueueRequest(state)
return child_urn | def function[_CallFlowLegacy, parameter[self, flow_name, next_state, request_data, client_id, base_session_id]]:
constant[Creates a new flow and send its responses to a state.
This creates a new flow. The flow may send back many responses which will be
queued by the framework until the flow terminates. The final status message
will cause the entire transaction to be committed to the specified state.
Args:
flow_name: The name of the flow to invoke.
next_state: The state in this flow, that responses to this message should
go to.
request_data: Any dict provided here will be available in the
RequestState protobuf. The Responses object maintains a reference to
this protobuf for use in the execution of the state method. (so you can
access this data by responses.request). There is no format mandated on
this data but it may be a serialized protobuf.
client_id: If given, the flow is started for this client.
base_session_id: A URN which will be used to build a URN.
**kwargs: Arguments for the child flow.
Returns:
The URN of the child flow which was created.
Raises:
RuntimeError: In case of no cpu quota left to start more clients.
]
variable[client_id] assign[=] <ast.BoolOp object at 0x7da20cabee90>
variable[state] assign[=] call[name[rdf_flow_runner].RequestState, parameter[]]
if name[request_data] begin[:]
name[state].data assign[=] call[call[name[rdf_protodict].Dict, parameter[]].FromDict, parameter[name[request_data]]]
variable[logs_urn] assign[=] name[self].hunt_obj.logs_collection_urn
variable[write_intermediate] assign[=] call[name[kwargs].pop, parameter[constant[write_intermediate_results], constant[False]]]
variable[child_urn] assign[=] call[name[self].hunt_obj.StartAFF4Flow, parameter[]]
call[name[self].QueueRequest, parameter[name[state]]]
return[name[child_urn]] | keyword[def] identifier[_CallFlowLegacy] ( identifier[self] ,
identifier[flow_name] = keyword[None] ,
identifier[next_state] = keyword[None] ,
identifier[request_data] = keyword[None] ,
identifier[client_id] = keyword[None] ,
identifier[base_session_id] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[client_id] = identifier[client_id] keyword[or] identifier[self] . identifier[runner_args] . identifier[client_id]
identifier[state] = identifier[rdf_flow_runner] . identifier[RequestState] (
identifier[id] = identifier[self] . identifier[GetNextOutboundId] (),
identifier[session_id] = identifier[utils] . identifier[SmartUnicode] ( identifier[self] . identifier[session_id] ),
identifier[client_id] = identifier[client_id] ,
identifier[next_state] = identifier[next_state] ,
identifier[response_count] = literal[int] )
keyword[if] identifier[request_data] :
identifier[state] . identifier[data] = identifier[rdf_protodict] . identifier[Dict] (). identifier[FromDict] ( identifier[request_data] )
identifier[logs_urn] = identifier[self] . identifier[hunt_obj] . identifier[logs_collection_urn]
identifier[write_intermediate] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[child_urn] = identifier[self] . identifier[hunt_obj] . identifier[StartAFF4Flow] (
identifier[base_session_id] = identifier[base_session_id] keyword[or] identifier[self] . identifier[session_id] ,
identifier[client_id] = identifier[client_id] ,
identifier[cpu_limit] = identifier[self] . identifier[_GetSubFlowCPULimit] (),
identifier[flow_name] = identifier[flow_name] ,
identifier[logs_collection_urn] = identifier[logs_urn] ,
identifier[network_bytes_limit] = identifier[self] . identifier[_GetSubFlowNetworkLimit] (),
identifier[notify_to_user] = keyword[False] ,
identifier[parent_flow] = identifier[self] . identifier[hunt_obj] ,
identifier[queue] = identifier[self] . identifier[runner_args] . identifier[queue] ,
identifier[request_state] = identifier[state] ,
identifier[sync] = keyword[False] ,
identifier[token] = identifier[self] . identifier[token] ,
identifier[write_intermediate_results] = identifier[write_intermediate] ,
** identifier[kwargs] )
identifier[self] . identifier[QueueRequest] ( identifier[state] )
keyword[return] identifier[child_urn] | def _CallFlowLegacy(self, flow_name=None, next_state=None, request_data=None, client_id=None, base_session_id=None, **kwargs):
"""Creates a new flow and send its responses to a state.
This creates a new flow. The flow may send back many responses which will be
queued by the framework until the flow terminates. The final status message
will cause the entire transaction to be committed to the specified state.
Args:
flow_name: The name of the flow to invoke.
next_state: The state in this flow, that responses to this message should
go to.
request_data: Any dict provided here will be available in the
RequestState protobuf. The Responses object maintains a reference to
this protobuf for use in the execution of the state method. (so you can
access this data by responses.request). There is no format mandated on
this data but it may be a serialized protobuf.
client_id: If given, the flow is started for this client.
base_session_id: A URN which will be used to build a URN.
**kwargs: Arguments for the child flow.
Returns:
The URN of the child flow which was created.
Raises:
RuntimeError: In case of no cpu quota left to start more clients.
"""
client_id = client_id or self.runner_args.client_id
# We prepare a request state, and add it to our queue - any
# responses from the child flow will return to the request state
# and the stated next_state. Note however, that there is no
# client_id or actual request message here because we directly
# invoke the child flow rather than queue anything for it.
state = rdf_flow_runner.RequestState(id=self.GetNextOutboundId(), session_id=utils.SmartUnicode(self.session_id), client_id=client_id, next_state=next_state, response_count=0)
if request_data:
state.data = rdf_protodict.Dict().FromDict(request_data) # depends on [control=['if'], data=[]]
# Pass our logs collection urn to the flow object.
logs_urn = self.hunt_obj.logs_collection_urn
# If we were called with write_intermediate_results, propagate down to
# child flows. This allows write_intermediate_results to be set to True
# either at the top level parent, or somewhere in the middle of
# the call chain.
write_intermediate = kwargs.pop('write_intermediate_results', False)
# Create the new child flow but do not notify the user about it.
child_urn = self.hunt_obj.StartAFF4Flow(base_session_id=base_session_id or self.session_id, client_id=client_id, cpu_limit=self._GetSubFlowCPULimit(), flow_name=flow_name, logs_collection_urn=logs_urn, network_bytes_limit=self._GetSubFlowNetworkLimit(), notify_to_user=False, parent_flow=self.hunt_obj, queue=self.runner_args.queue, request_state=state, sync=False, token=self.token, write_intermediate_results=write_intermediate, **kwargs)
self.QueueRequest(state)
return child_urn |
def open(self, user=None, repo=None):
'''Open the URL of a repository in the user's browser'''
webbrowser.open(self.format_path(repo, namespace=user, rw=False)) | def function[open, parameter[self, user, repo]]:
constant[Open the URL of a repository in the user's browser]
call[name[webbrowser].open, parameter[call[name[self].format_path, parameter[name[repo]]]]] | keyword[def] identifier[open] ( identifier[self] , identifier[user] = keyword[None] , identifier[repo] = keyword[None] ):
literal[string]
identifier[webbrowser] . identifier[open] ( identifier[self] . identifier[format_path] ( identifier[repo] , identifier[namespace] = identifier[user] , identifier[rw] = keyword[False] )) | def open(self, user=None, repo=None):
"""Open the URL of a repository in the user's browser"""
webbrowser.open(self.format_path(repo, namespace=user, rw=False)) |
def do_setup_for_pip_local(self, repo):
'''
Configure repo to be directory based with directory `~/.pip/local`.
Also makes that directory if needed.
'''
effective_repo_name = self.get_effective_repo_name(repo)
self.abort_on_nonexisting_repo(
effective_repo_name, 'setup_for_pip_local'
)
self.network.setup_for_pip_local(effective_repo_name) | def function[do_setup_for_pip_local, parameter[self, repo]]:
constant[
Configure repo to be directory based with directory `~/.pip/local`.
Also makes that directory if needed.
]
variable[effective_repo_name] assign[=] call[name[self].get_effective_repo_name, parameter[name[repo]]]
call[name[self].abort_on_nonexisting_repo, parameter[name[effective_repo_name], constant[setup_for_pip_local]]]
call[name[self].network.setup_for_pip_local, parameter[name[effective_repo_name]]] | keyword[def] identifier[do_setup_for_pip_local] ( identifier[self] , identifier[repo] ):
literal[string]
identifier[effective_repo_name] = identifier[self] . identifier[get_effective_repo_name] ( identifier[repo] )
identifier[self] . identifier[abort_on_nonexisting_repo] (
identifier[effective_repo_name] , literal[string]
)
identifier[self] . identifier[network] . identifier[setup_for_pip_local] ( identifier[effective_repo_name] ) | def do_setup_for_pip_local(self, repo):
"""
Configure repo to be directory based with directory `~/.pip/local`.
Also makes that directory if needed.
"""
effective_repo_name = self.get_effective_repo_name(repo)
self.abort_on_nonexisting_repo(effective_repo_name, 'setup_for_pip_local')
self.network.setup_for_pip_local(effective_repo_name) |
def refresh_swagger(self):
"""
Manually refresh the swagger document. This can help resolve errors communicate with the API.
"""
try:
os.remove(self._get_swagger_filename(self.swagger_url))
except EnvironmentError as e:
logger.warn(os.strerror(e.errno))
else:
self.__init__() | def function[refresh_swagger, parameter[self]]:
constant[
Manually refresh the swagger document. This can help resolve errors communicate with the API.
]
<ast.Try object at 0x7da204566c20> | keyword[def] identifier[refresh_swagger] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[os] . identifier[remove] ( identifier[self] . identifier[_get_swagger_filename] ( identifier[self] . identifier[swagger_url] ))
keyword[except] identifier[EnvironmentError] keyword[as] identifier[e] :
identifier[logger] . identifier[warn] ( identifier[os] . identifier[strerror] ( identifier[e] . identifier[errno] ))
keyword[else] :
identifier[self] . identifier[__init__] () | def refresh_swagger(self):
"""
Manually refresh the swagger document. This can help resolve errors communicate with the API.
"""
try:
os.remove(self._get_swagger_filename(self.swagger_url)) # depends on [control=['try'], data=[]]
except EnvironmentError as e:
logger.warn(os.strerror(e.errno)) # depends on [control=['except'], data=['e']]
else:
self.__init__() |
def get_validated_types(object_types: Set[Type], set_name: str) -> Set[Type]:
"""
Utility to validate a set of types :
* None is not allowed as a whole or within the set,
* object and Any are converted into AnyObject
* if AnyObject is in the set, it must be the only element
:param object_types: the set of types to validate
:param set_name: a name used in exceptions if any
:return: the fixed set of types
"""
check_var(object_types, var_types=set, var_name=set_name)
res = {get_validated_type(typ, set_name + '[x]') for typ in object_types}
if AnyObject in res and len(res) > 1:
raise ValueError('The set of types contains \'object\'/\'Any\'/\'AnyObject\', so no other type must be present '
'in the set')
else:
return res | def function[get_validated_types, parameter[object_types, set_name]]:
constant[
Utility to validate a set of types :
* None is not allowed as a whole or within the set,
* object and Any are converted into AnyObject
* if AnyObject is in the set, it must be the only element
:param object_types: the set of types to validate
:param set_name: a name used in exceptions if any
:return: the fixed set of types
]
call[name[check_var], parameter[name[object_types]]]
variable[res] assign[=] <ast.SetComp object at 0x7da204564970>
if <ast.BoolOp object at 0x7da2045668c0> begin[:]
<ast.Raise object at 0x7da2045649a0> | keyword[def] identifier[get_validated_types] ( identifier[object_types] : identifier[Set] [ identifier[Type] ], identifier[set_name] : identifier[str] )-> identifier[Set] [ identifier[Type] ]:
literal[string]
identifier[check_var] ( identifier[object_types] , identifier[var_types] = identifier[set] , identifier[var_name] = identifier[set_name] )
identifier[res] ={ identifier[get_validated_type] ( identifier[typ] , identifier[set_name] + literal[string] ) keyword[for] identifier[typ] keyword[in] identifier[object_types] }
keyword[if] identifier[AnyObject] keyword[in] identifier[res] keyword[and] identifier[len] ( identifier[res] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[else] :
keyword[return] identifier[res] | def get_validated_types(object_types: Set[Type], set_name: str) -> Set[Type]:
"""
Utility to validate a set of types :
* None is not allowed as a whole or within the set,
* object and Any are converted into AnyObject
* if AnyObject is in the set, it must be the only element
:param object_types: the set of types to validate
:param set_name: a name used in exceptions if any
:return: the fixed set of types
"""
check_var(object_types, var_types=set, var_name=set_name)
res = {get_validated_type(typ, set_name + '[x]') for typ in object_types}
if AnyObject in res and len(res) > 1:
raise ValueError("The set of types contains 'object'/'Any'/'AnyObject', so no other type must be present in the set") # depends on [control=['if'], data=[]]
else:
return res |
def copy(self):
"""Make a copy of current operator."""
# pylint: disable=no-value-for-parameter
# The constructor of subclasses from raw data should be a copy
return self.__class__(self.data, self.input_dims(), self.output_dims()) | def function[copy, parameter[self]]:
constant[Make a copy of current operator.]
return[call[name[self].__class__, parameter[name[self].data, call[name[self].input_dims, parameter[]], call[name[self].output_dims, parameter[]]]]] | keyword[def] identifier[copy] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[data] , identifier[self] . identifier[input_dims] (), identifier[self] . identifier[output_dims] ()) | def copy(self):
"""Make a copy of current operator."""
# pylint: disable=no-value-for-parameter
# The constructor of subclasses from raw data should be a copy
return self.__class__(self.data, self.input_dims(), self.output_dims()) |
def push(image,
insecure_registry=False,
api_response=False,
client_timeout=salt.utils.docker.CLIENT_TIMEOUT):
'''
.. versionchanged:: 2015.8.4
The ``Id`` and ``Image`` keys are no longer present in the return data.
This is due to changes in the Docker Remote API.
Pushes an image to a Docker registry. See the documentation at top of this
page to configure authentication credentials.
image
Image to be pushed. If just the repository name is passed, then all
tagged images for the specified repo will be pushed. If the image name
is passed in ``repo:tag`` notation, only the specified image will be
pushed.
insecure_registry : False
If ``True``, the Docker client will permit the use of insecure
(non-HTTPS) registries.
api_response : False
If ``True``, an ``API_Response`` key will be present in the return
data, containing the raw output from the Docker API.
client_timeout
Timeout in seconds for the Docker client. This is not a timeout for
this function, but for receiving a response from the API.
**RETURN DATA**
A dictionary will be returned, containing the following keys:
- ``Layers`` - A dictionary containing one or more of the following keys:
- ``Already_Pushed`` - Layers that that were already present on the
Minion
- ``Pushed`` - Layers that that were pushed
- ``Time_Elapsed`` - Time in seconds taken to perform the push
CLI Example:
.. code-block:: bash
salt myminion docker.push myuser/mycontainer
salt myminion docker.push myuser/mycontainer:mytag
'''
if not isinstance(image, six.string_types):
image = six.text_type(image)
kwargs = {'stream': True,
'client_timeout': client_timeout}
if insecure_registry:
kwargs['insecure_registry'] = insecure_registry
time_started = time.time()
response = _client_wrapper('push', image, **kwargs)
ret = {'Time_Elapsed': time.time() - time_started, 'retcode': 0}
_clear_context()
if not response:
raise CommandExecutionError(
'Push failed for {0}, no response returned from Docker API'
.format(image)
)
elif api_response:
ret['API_Response'] = response
errors = []
# Iterate through API response and collect information
for event in response:
try:
event = salt.utils.json.loads(event)
except Exception as exc:
raise CommandExecutionError(
'Unable to interpret API event: \'{0}\''.format(event),
info={'Error': exc.__str__()}
)
try:
event_type = next(iter(event))
except StopIteration:
continue
if event_type == 'status':
_push_status(ret, event)
elif event_type == 'errorDetail':
_error_detail(errors, event)
if errors:
ret['Errors'] = errors
ret['retcode'] = 1
return ret | def function[push, parameter[image, insecure_registry, api_response, client_timeout]]:
constant[
.. versionchanged:: 2015.8.4
The ``Id`` and ``Image`` keys are no longer present in the return data.
This is due to changes in the Docker Remote API.
Pushes an image to a Docker registry. See the documentation at top of this
page to configure authentication credentials.
image
Image to be pushed. If just the repository name is passed, then all
tagged images for the specified repo will be pushed. If the image name
is passed in ``repo:tag`` notation, only the specified image will be
pushed.
insecure_registry : False
If ``True``, the Docker client will permit the use of insecure
(non-HTTPS) registries.
api_response : False
If ``True``, an ``API_Response`` key will be present in the return
data, containing the raw output from the Docker API.
client_timeout
Timeout in seconds for the Docker client. This is not a timeout for
this function, but for receiving a response from the API.
**RETURN DATA**
A dictionary will be returned, containing the following keys:
- ``Layers`` - A dictionary containing one or more of the following keys:
- ``Already_Pushed`` - Layers that that were already present on the
Minion
- ``Pushed`` - Layers that that were pushed
- ``Time_Elapsed`` - Time in seconds taken to perform the push
CLI Example:
.. code-block:: bash
salt myminion docker.push myuser/mycontainer
salt myminion docker.push myuser/mycontainer:mytag
]
if <ast.UnaryOp object at 0x7da1b1f79360> begin[:]
variable[image] assign[=] call[name[six].text_type, parameter[name[image]]]
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f79ed0>, <ast.Constant object at 0x7da1b1f79960>], [<ast.Constant object at 0x7da1b1f48cd0>, <ast.Name object at 0x7da1b1f48e20>]]
if name[insecure_registry] begin[:]
call[name[kwargs]][constant[insecure_registry]] assign[=] name[insecure_registry]
variable[time_started] assign[=] call[name[time].time, parameter[]]
variable[response] assign[=] call[name[_client_wrapper], parameter[constant[push], name[image]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b20070a0>, <ast.Constant object at 0x7da1b20077f0>], [<ast.BinOp object at 0x7da1b2007a90>, <ast.Constant object at 0x7da1b2007e50>]]
call[name[_clear_context], parameter[]]
if <ast.UnaryOp object at 0x7da1b1f82aa0> begin[:]
<ast.Raise object at 0x7da1b1f82dd0>
variable[errors] assign[=] list[[]]
for taget[name[event]] in starred[name[response]] begin[:]
<ast.Try object at 0x7da1b1f79150>
<ast.Try object at 0x7da1b1f78fa0>
if compare[name[event_type] equal[==] constant[status]] begin[:]
call[name[_push_status], parameter[name[ret], name[event]]]
if name[errors] begin[:]
call[name[ret]][constant[Errors]] assign[=] name[errors]
call[name[ret]][constant[retcode]] assign[=] constant[1]
return[name[ret]] | keyword[def] identifier[push] ( identifier[image] ,
identifier[insecure_registry] = keyword[False] ,
identifier[api_response] = keyword[False] ,
identifier[client_timeout] = identifier[salt] . identifier[utils] . identifier[docker] . identifier[CLIENT_TIMEOUT] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[image] , identifier[six] . identifier[string_types] ):
identifier[image] = identifier[six] . identifier[text_type] ( identifier[image] )
identifier[kwargs] ={ literal[string] : keyword[True] ,
literal[string] : identifier[client_timeout] }
keyword[if] identifier[insecure_registry] :
identifier[kwargs] [ literal[string] ]= identifier[insecure_registry]
identifier[time_started] = identifier[time] . identifier[time] ()
identifier[response] = identifier[_client_wrapper] ( literal[string] , identifier[image] ,** identifier[kwargs] )
identifier[ret] ={ literal[string] : identifier[time] . identifier[time] ()- identifier[time_started] , literal[string] : literal[int] }
identifier[_clear_context] ()
keyword[if] keyword[not] identifier[response] :
keyword[raise] identifier[CommandExecutionError] (
literal[string]
. identifier[format] ( identifier[image] )
)
keyword[elif] identifier[api_response] :
identifier[ret] [ literal[string] ]= identifier[response]
identifier[errors] =[]
keyword[for] identifier[event] keyword[in] identifier[response] :
keyword[try] :
identifier[event] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[event] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[event] ),
identifier[info] ={ literal[string] : identifier[exc] . identifier[__str__] ()}
)
keyword[try] :
identifier[event_type] = identifier[next] ( identifier[iter] ( identifier[event] ))
keyword[except] identifier[StopIteration] :
keyword[continue]
keyword[if] identifier[event_type] == literal[string] :
identifier[_push_status] ( identifier[ret] , identifier[event] )
keyword[elif] identifier[event_type] == literal[string] :
identifier[_error_detail] ( identifier[errors] , identifier[event] )
keyword[if] identifier[errors] :
identifier[ret] [ literal[string] ]= identifier[errors]
identifier[ret] [ literal[string] ]= literal[int]
keyword[return] identifier[ret] | def push(image, insecure_registry=False, api_response=False, client_timeout=salt.utils.docker.CLIENT_TIMEOUT):
"""
.. versionchanged:: 2015.8.4
The ``Id`` and ``Image`` keys are no longer present in the return data.
This is due to changes in the Docker Remote API.
Pushes an image to a Docker registry. See the documentation at top of this
page to configure authentication credentials.
image
Image to be pushed. If just the repository name is passed, then all
tagged images for the specified repo will be pushed. If the image name
is passed in ``repo:tag`` notation, only the specified image will be
pushed.
insecure_registry : False
If ``True``, the Docker client will permit the use of insecure
(non-HTTPS) registries.
api_response : False
If ``True``, an ``API_Response`` key will be present in the return
data, containing the raw output from the Docker API.
client_timeout
Timeout in seconds for the Docker client. This is not a timeout for
this function, but for receiving a response from the API.
**RETURN DATA**
A dictionary will be returned, containing the following keys:
- ``Layers`` - A dictionary containing one or more of the following keys:
- ``Already_Pushed`` - Layers that that were already present on the
Minion
- ``Pushed`` - Layers that that were pushed
- ``Time_Elapsed`` - Time in seconds taken to perform the push
CLI Example:
.. code-block:: bash
salt myminion docker.push myuser/mycontainer
salt myminion docker.push myuser/mycontainer:mytag
"""
if not isinstance(image, six.string_types):
image = six.text_type(image) # depends on [control=['if'], data=[]]
kwargs = {'stream': True, 'client_timeout': client_timeout}
if insecure_registry:
kwargs['insecure_registry'] = insecure_registry # depends on [control=['if'], data=[]]
time_started = time.time()
response = _client_wrapper('push', image, **kwargs)
ret = {'Time_Elapsed': time.time() - time_started, 'retcode': 0}
_clear_context()
if not response:
raise CommandExecutionError('Push failed for {0}, no response returned from Docker API'.format(image)) # depends on [control=['if'], data=[]]
elif api_response:
ret['API_Response'] = response # depends on [control=['if'], data=[]]
errors = []
# Iterate through API response and collect information
for event in response:
try:
event = salt.utils.json.loads(event) # depends on [control=['try'], data=[]]
except Exception as exc:
raise CommandExecutionError("Unable to interpret API event: '{0}'".format(event), info={'Error': exc.__str__()}) # depends on [control=['except'], data=['exc']]
try:
event_type = next(iter(event)) # depends on [control=['try'], data=[]]
except StopIteration:
continue # depends on [control=['except'], data=[]]
if event_type == 'status':
_push_status(ret, event) # depends on [control=['if'], data=[]]
elif event_type == 'errorDetail':
_error_detail(errors, event) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']]
if errors:
ret['Errors'] = errors
ret['retcode'] = 1 # depends on [control=['if'], data=[]]
return ret |
def ppo_tiny_world_model():
"""Atari parameters with world model as policy."""
hparams = ppo_original_params()
hparams.policy_network = "next_frame_basic_deterministic"
hparams_keys = hparams.values().keys()
video_hparams = basic_deterministic_params.next_frame_tiny()
for (name, value) in six.iteritems(video_hparams.values()):
if name in hparams_keys:
hparams.set_hparam(name, value)
else:
hparams.add_hparam(name, value)
hparams.weight_decay = 0
return hparams | def function[ppo_tiny_world_model, parameter[]]:
constant[Atari parameters with world model as policy.]
variable[hparams] assign[=] call[name[ppo_original_params], parameter[]]
name[hparams].policy_network assign[=] constant[next_frame_basic_deterministic]
variable[hparams_keys] assign[=] call[call[name[hparams].values, parameter[]].keys, parameter[]]
variable[video_hparams] assign[=] call[name[basic_deterministic_params].next_frame_tiny, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1ff9690>, <ast.Name object at 0x7da1b1ffbca0>]]] in starred[call[name[six].iteritems, parameter[call[name[video_hparams].values, parameter[]]]]] begin[:]
if compare[name[name] in name[hparams_keys]] begin[:]
call[name[hparams].set_hparam, parameter[name[name], name[value]]]
name[hparams].weight_decay assign[=] constant[0]
return[name[hparams]] | keyword[def] identifier[ppo_tiny_world_model] ():
literal[string]
identifier[hparams] = identifier[ppo_original_params] ()
identifier[hparams] . identifier[policy_network] = literal[string]
identifier[hparams_keys] = identifier[hparams] . identifier[values] (). identifier[keys] ()
identifier[video_hparams] = identifier[basic_deterministic_params] . identifier[next_frame_tiny] ()
keyword[for] ( identifier[name] , identifier[value] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[video_hparams] . identifier[values] ()):
keyword[if] identifier[name] keyword[in] identifier[hparams_keys] :
identifier[hparams] . identifier[set_hparam] ( identifier[name] , identifier[value] )
keyword[else] :
identifier[hparams] . identifier[add_hparam] ( identifier[name] , identifier[value] )
identifier[hparams] . identifier[weight_decay] = literal[int]
keyword[return] identifier[hparams] | def ppo_tiny_world_model():
"""Atari parameters with world model as policy."""
hparams = ppo_original_params()
hparams.policy_network = 'next_frame_basic_deterministic'
hparams_keys = hparams.values().keys()
video_hparams = basic_deterministic_params.next_frame_tiny()
for (name, value) in six.iteritems(video_hparams.values()):
if name in hparams_keys:
hparams.set_hparam(name, value) # depends on [control=['if'], data=['name']]
else:
hparams.add_hparam(name, value) # depends on [control=['for'], data=[]]
hparams.weight_decay = 0
return hparams |
def argv(self):
"""Command to start kernels"""
# Python interpreter used to start kernels
if CONF.get('main_interpreter', 'default'):
pyexec = get_python_executable()
else:
# Avoid IPython adding the virtualenv on which Spyder is running
# to the kernel sys.path
os.environ.pop('VIRTUAL_ENV', None)
pyexec = CONF.get('main_interpreter', 'executable')
if not is_python_interpreter(pyexec):
pyexec = get_python_executable()
CONF.set('main_interpreter', 'executable', '')
CONF.set('main_interpreter', 'default', True)
CONF.set('main_interpreter', 'custom', False)
# Fixes Issue #3427
if os.name == 'nt':
dir_pyexec = osp.dirname(pyexec)
pyexec_w = osp.join(dir_pyexec, 'pythonw.exe')
if osp.isfile(pyexec_w):
pyexec = pyexec_w
# Command used to start kernels
kernel_cmd = [
pyexec,
'-m',
'spyder_kernels.console',
'-f',
'{connection_file}'
]
return kernel_cmd | def function[argv, parameter[self]]:
constant[Command to start kernels]
if call[name[CONF].get, parameter[constant[main_interpreter], constant[default]]] begin[:]
variable[pyexec] assign[=] call[name[get_python_executable], parameter[]]
if compare[name[os].name equal[==] constant[nt]] begin[:]
variable[dir_pyexec] assign[=] call[name[osp].dirname, parameter[name[pyexec]]]
variable[pyexec_w] assign[=] call[name[osp].join, parameter[name[dir_pyexec], constant[pythonw.exe]]]
if call[name[osp].isfile, parameter[name[pyexec_w]]] begin[:]
variable[pyexec] assign[=] name[pyexec_w]
variable[kernel_cmd] assign[=] list[[<ast.Name object at 0x7da18dc05210>, <ast.Constant object at 0x7da18dc07550>, <ast.Constant object at 0x7da18dc053f0>, <ast.Constant object at 0x7da18dc04100>, <ast.Constant object at 0x7da18dc04790>]]
return[name[kernel_cmd]] | keyword[def] identifier[argv] ( identifier[self] ):
literal[string]
keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ):
identifier[pyexec] = identifier[get_python_executable] ()
keyword[else] :
identifier[os] . identifier[environ] . identifier[pop] ( literal[string] , keyword[None] )
identifier[pyexec] = identifier[CONF] . identifier[get] ( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[is_python_interpreter] ( identifier[pyexec] ):
identifier[pyexec] = identifier[get_python_executable] ()
identifier[CONF] . identifier[set] ( literal[string] , literal[string] , literal[string] )
identifier[CONF] . identifier[set] ( literal[string] , literal[string] , keyword[True] )
identifier[CONF] . identifier[set] ( literal[string] , literal[string] , keyword[False] )
keyword[if] identifier[os] . identifier[name] == literal[string] :
identifier[dir_pyexec] = identifier[osp] . identifier[dirname] ( identifier[pyexec] )
identifier[pyexec_w] = identifier[osp] . identifier[join] ( identifier[dir_pyexec] , literal[string] )
keyword[if] identifier[osp] . identifier[isfile] ( identifier[pyexec_w] ):
identifier[pyexec] = identifier[pyexec_w]
identifier[kernel_cmd] =[
identifier[pyexec] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]
keyword[return] identifier[kernel_cmd] | def argv(self):
"""Command to start kernels"""
# Python interpreter used to start kernels
if CONF.get('main_interpreter', 'default'):
pyexec = get_python_executable() # depends on [control=['if'], data=[]]
else:
# Avoid IPython adding the virtualenv on which Spyder is running
# to the kernel sys.path
os.environ.pop('VIRTUAL_ENV', None)
pyexec = CONF.get('main_interpreter', 'executable')
if not is_python_interpreter(pyexec):
pyexec = get_python_executable()
CONF.set('main_interpreter', 'executable', '')
CONF.set('main_interpreter', 'default', True)
CONF.set('main_interpreter', 'custom', False) # depends on [control=['if'], data=[]]
# Fixes Issue #3427
if os.name == 'nt':
dir_pyexec = osp.dirname(pyexec)
pyexec_w = osp.join(dir_pyexec, 'pythonw.exe')
if osp.isfile(pyexec_w):
pyexec = pyexec_w # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Command used to start kernels
kernel_cmd = [pyexec, '-m', 'spyder_kernels.console', '-f', '{connection_file}']
return kernel_cmd |
def delete_resource(self, resource_name, name):
"""Delete a specific resource by name"""
try:
logger.info("Trying to get %s: '%s'", resource_name, name)
if name is None:
# No name is defined, delete all the resources...
if not self.dry_run:
headers = {
'Content-Type': 'application/json'
}
logger.info("-> deleting all %s", resource_name)
self.backend.delete(resource_name, headers)
logger.info("-> deleted all %s", resource_name)
else:
response = {'_id': '_fake', '_etag': '_fake'}
logger.info("Dry-run mode: should have deleted all %s", resource_name)
else:
params = {'where': json.dumps({'name': name})}
if resource_name in ['host', 'service', 'user']:
params = {'where': json.dumps({'name': name, '_is_template': self.model})}
if resource_name == 'service' and '/' in name:
splitted_name = name.split('/')
name = splitted_name[0] + '_' + splitted_name[1]
# Get host from name
response2 = self.backend.get(
'host', params={'where': json.dumps({'name': splitted_name[0]})})
if response2['_items']:
host = response2['_items'][0]
logger.info("Got host '%s' for the service '%s'",
splitted_name[0], splitted_name[1])
else:
logger.warning("Not found host '%s'!", splitted_name[0])
return False
if splitted_name[1] == '*':
params = {'where': json.dumps({'host': host['_id']})}
else:
params = {'where': json.dumps({'name': splitted_name[1],
'host': host['_id']})}
response = self.backend.get_all(resource_name, params=params)
if response['_items']:
logger.info("-> found %d matching %s", len(response['_items']), resource_name)
for item in response['_items']:
logger.info("-> found %s '%s': %s", resource_name, name, item['name'])
# Exists in the backend, we must delete the element...
if not self.dry_run:
headers = {
'Content-Type': 'application/json',
'If-Match': item['_etag']
}
logger.info("-> deleting %s: %s", resource_name, item['name'])
self.backend.delete(resource_name + '/' + item['_id'], headers)
logger.info("-> deleted %s: %s", resource_name, item['name'])
else:
response = {'_id': '_fake', '_etag': '_fake'}
logger.info("Dry-run mode: should have deleted an %s '%s'",
resource_name, name)
logger.info("-> deleted: '%s': %s",
resource_name, item['_id'])
else:
logger.warning("-> %s item '%s' not found", resource_name, name)
return False
except BackendException as exp: # pragma: no cover, should never happen
logger.exception("Exception: %s", exp)
logger.error("Response: %s", exp.response)
print("Deletion error for '%s' : %s" % (resource_name, name))
print("~~~~~~~~~~~~~~~~~~~~~~~~~~")
print("Exiting with error code: 5")
return False
return True | def function[delete_resource, parameter[self, resource_name, name]]:
constant[Delete a specific resource by name]
<ast.Try object at 0x7da1b01b9c60>
return[constant[True]] | keyword[def] identifier[delete_resource] ( identifier[self] , identifier[resource_name] , identifier[name] ):
literal[string]
keyword[try] :
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] )
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[self] . identifier[dry_run] :
identifier[headers] ={
literal[string] : literal[string]
}
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] )
identifier[self] . identifier[backend] . identifier[delete] ( identifier[resource_name] , identifier[headers] )
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] )
keyword[else] :
identifier[response] ={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] )
keyword[else] :
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[name] })}
keyword[if] identifier[resource_name] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[name] , literal[string] : identifier[self] . identifier[model] })}
keyword[if] identifier[resource_name] == literal[string] keyword[and] literal[string] keyword[in] identifier[name] :
identifier[splitted_name] = identifier[name] . identifier[split] ( literal[string] )
identifier[name] = identifier[splitted_name] [ literal[int] ]+ literal[string] + identifier[splitted_name] [ literal[int] ]
identifier[response2] = identifier[self] . identifier[backend] . identifier[get] (
literal[string] , identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[splitted_name] [ literal[int] ]})})
keyword[if] identifier[response2] [ literal[string] ]:
identifier[host] = identifier[response2] [ literal[string] ][ literal[int] ]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[splitted_name] [ literal[int] ], identifier[splitted_name] [ literal[int] ])
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[splitted_name] [ literal[int] ])
keyword[return] keyword[False]
keyword[if] identifier[splitted_name] [ literal[int] ]== literal[string] :
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[host] [ literal[string] ]})}
keyword[else] :
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[splitted_name] [ literal[int] ],
literal[string] : identifier[host] [ literal[string] ]})}
identifier[response] = identifier[self] . identifier[backend] . identifier[get_all] ( identifier[resource_name] , identifier[params] = identifier[params] )
keyword[if] identifier[response] [ literal[string] ]:
identifier[logger] . identifier[info] ( literal[string] , identifier[len] ( identifier[response] [ literal[string] ]), identifier[resource_name] )
keyword[for] identifier[item] keyword[in] identifier[response] [ literal[string] ]:
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] , identifier[item] [ literal[string] ])
keyword[if] keyword[not] identifier[self] . identifier[dry_run] :
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : identifier[item] [ literal[string] ]
}
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[item] [ literal[string] ])
identifier[self] . identifier[backend] . identifier[delete] ( identifier[resource_name] + literal[string] + identifier[item] [ literal[string] ], identifier[headers] )
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[item] [ literal[string] ])
keyword[else] :
identifier[response] ={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[logger] . identifier[info] ( literal[string] ,
identifier[resource_name] , identifier[name] )
identifier[logger] . identifier[info] ( literal[string] ,
identifier[resource_name] , identifier[item] [ literal[string] ])
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[resource_name] , identifier[name] )
keyword[return] keyword[False]
keyword[except] identifier[BackendException] keyword[as] identifier[exp] :
identifier[logger] . identifier[exception] ( literal[string] , identifier[exp] )
identifier[logger] . identifier[error] ( literal[string] , identifier[exp] . identifier[response] )
identifier[print] ( literal[string] %( identifier[resource_name] , identifier[name] ))
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def delete_resource(self, resource_name, name):
"""Delete a specific resource by name"""
try:
logger.info("Trying to get %s: '%s'", resource_name, name)
if name is None:
# No name is defined, delete all the resources...
if not self.dry_run:
headers = {'Content-Type': 'application/json'}
logger.info('-> deleting all %s', resource_name)
self.backend.delete(resource_name, headers)
logger.info('-> deleted all %s', resource_name) # depends on [control=['if'], data=[]]
else:
response = {'_id': '_fake', '_etag': '_fake'}
logger.info('Dry-run mode: should have deleted all %s', resource_name) # depends on [control=['if'], data=[]]
else:
params = {'where': json.dumps({'name': name})}
if resource_name in ['host', 'service', 'user']:
params = {'where': json.dumps({'name': name, '_is_template': self.model})} # depends on [control=['if'], data=[]]
if resource_name == 'service' and '/' in name:
splitted_name = name.split('/')
name = splitted_name[0] + '_' + splitted_name[1]
# Get host from name
response2 = self.backend.get('host', params={'where': json.dumps({'name': splitted_name[0]})})
if response2['_items']:
host = response2['_items'][0]
logger.info("Got host '%s' for the service '%s'", splitted_name[0], splitted_name[1]) # depends on [control=['if'], data=[]]
else:
logger.warning("Not found host '%s'!", splitted_name[0])
return False
if splitted_name[1] == '*':
params = {'where': json.dumps({'host': host['_id']})} # depends on [control=['if'], data=[]]
else:
params = {'where': json.dumps({'name': splitted_name[1], 'host': host['_id']})} # depends on [control=['if'], data=[]]
response = self.backend.get_all(resource_name, params=params)
if response['_items']:
logger.info('-> found %d matching %s', len(response['_items']), resource_name)
for item in response['_items']:
logger.info("-> found %s '%s': %s", resource_name, name, item['name'])
# Exists in the backend, we must delete the element...
if not self.dry_run:
headers = {'Content-Type': 'application/json', 'If-Match': item['_etag']}
logger.info('-> deleting %s: %s', resource_name, item['name'])
self.backend.delete(resource_name + '/' + item['_id'], headers)
logger.info('-> deleted %s: %s', resource_name, item['name']) # depends on [control=['if'], data=[]]
else:
response = {'_id': '_fake', '_etag': '_fake'}
logger.info("Dry-run mode: should have deleted an %s '%s'", resource_name, name)
logger.info("-> deleted: '%s': %s", resource_name, item['_id']) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
else:
logger.warning("-> %s item '%s' not found", resource_name, name)
return False # depends on [control=['try'], data=[]]
except BackendException as exp: # pragma: no cover, should never happen
logger.exception('Exception: %s', exp)
logger.error('Response: %s', exp.response)
print("Deletion error for '%s' : %s" % (resource_name, name))
print('~~~~~~~~~~~~~~~~~~~~~~~~~~')
print('Exiting with error code: 5')
return False # depends on [control=['except'], data=['exp']]
return True |
def bridge_create(br, may_exist=True, parent=None, vlan=None):
'''
Creates a new bridge.
Args:
br: A string - bridge name
may_exist: Bool, if False - attempting to create a bridge that exists returns False.
parent: String, the name of the parent bridge (if the bridge shall be
created as a fake bridge). If specified, vlan must also be
specified.
vlan: Int, the VLAN ID of the bridge (if the bridge shall be created as
a fake bridge). If specified, parent must also be specified.
Returns:
True on success, else False.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' openvswitch.bridge_create br0
'''
param_may_exist = _param_may_exist(may_exist)
if parent is not None and vlan is None:
raise ArgumentValueError(
'If parent is specified, vlan must also be specified.')
if vlan is not None and parent is None:
raise ArgumentValueError(
'If vlan is specified, parent must also be specified.')
param_parent = '' if parent is None else ' {0}'.format(parent)
param_vlan = '' if vlan is None else ' {0}'.format(vlan)
cmd = 'ovs-vsctl {1}add-br {0}{2}{3}'.format(br, param_may_exist, param_parent,
param_vlan)
result = __salt__['cmd.run_all'](cmd)
return _retcode_to_bool(result['retcode']) | def function[bridge_create, parameter[br, may_exist, parent, vlan]]:
constant[
Creates a new bridge.
Args:
br: A string - bridge name
may_exist: Bool, if False - attempting to create a bridge that exists returns False.
parent: String, the name of the parent bridge (if the bridge shall be
created as a fake bridge). If specified, vlan must also be
specified.
vlan: Int, the VLAN ID of the bridge (if the bridge shall be created as
a fake bridge). If specified, parent must also be specified.
Returns:
True on success, else False.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' openvswitch.bridge_create br0
]
variable[param_may_exist] assign[=] call[name[_param_may_exist], parameter[name[may_exist]]]
if <ast.BoolOp object at 0x7da1b1c99120> begin[:]
<ast.Raise object at 0x7da1b1c997e0>
if <ast.BoolOp object at 0x7da1b1c9aa70> begin[:]
<ast.Raise object at 0x7da1b1c9a770>
variable[param_parent] assign[=] <ast.IfExp object at 0x7da1b1c9a7d0>
variable[param_vlan] assign[=] <ast.IfExp object at 0x7da1b1c99870>
variable[cmd] assign[=] call[constant[ovs-vsctl {1}add-br {0}{2}{3}].format, parameter[name[br], name[param_may_exist], name[param_parent], name[param_vlan]]]
variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]
return[call[name[_retcode_to_bool], parameter[call[name[result]][constant[retcode]]]]] | keyword[def] identifier[bridge_create] ( identifier[br] , identifier[may_exist] = keyword[True] , identifier[parent] = keyword[None] , identifier[vlan] = keyword[None] ):
literal[string]
identifier[param_may_exist] = identifier[_param_may_exist] ( identifier[may_exist] )
keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] keyword[and] identifier[vlan] keyword[is] keyword[None] :
keyword[raise] identifier[ArgumentValueError] (
literal[string] )
keyword[if] identifier[vlan] keyword[is] keyword[not] keyword[None] keyword[and] identifier[parent] keyword[is] keyword[None] :
keyword[raise] identifier[ArgumentValueError] (
literal[string] )
identifier[param_parent] = literal[string] keyword[if] identifier[parent] keyword[is] keyword[None] keyword[else] literal[string] . identifier[format] ( identifier[parent] )
identifier[param_vlan] = literal[string] keyword[if] identifier[vlan] keyword[is] keyword[None] keyword[else] literal[string] . identifier[format] ( identifier[vlan] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[br] , identifier[param_may_exist] , identifier[param_parent] ,
identifier[param_vlan] )
identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[cmd] )
keyword[return] identifier[_retcode_to_bool] ( identifier[result] [ literal[string] ]) | def bridge_create(br, may_exist=True, parent=None, vlan=None):
"""
Creates a new bridge.
Args:
br: A string - bridge name
may_exist: Bool, if False - attempting to create a bridge that exists returns False.
parent: String, the name of the parent bridge (if the bridge shall be
created as a fake bridge). If specified, vlan must also be
specified.
vlan: Int, the VLAN ID of the bridge (if the bridge shall be created as
a fake bridge). If specified, parent must also be specified.
Returns:
True on success, else False.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' openvswitch.bridge_create br0
"""
param_may_exist = _param_may_exist(may_exist)
if parent is not None and vlan is None:
raise ArgumentValueError('If parent is specified, vlan must also be specified.') # depends on [control=['if'], data=[]]
if vlan is not None and parent is None:
raise ArgumentValueError('If vlan is specified, parent must also be specified.') # depends on [control=['if'], data=[]]
param_parent = '' if parent is None else ' {0}'.format(parent)
param_vlan = '' if vlan is None else ' {0}'.format(vlan)
cmd = 'ovs-vsctl {1}add-br {0}{2}{3}'.format(br, param_may_exist, param_parent, param_vlan)
result = __salt__['cmd.run_all'](cmd)
return _retcode_to_bool(result['retcode']) |
def validate_process_steps(prop, value):
""" Default validation for Process Steps data structure """
if value is not None:
validate_type(prop, value, (dict, list))
procstep_keys = set(_complex_definitions[prop])
for idx, procstep in enumerate(wrap_value(value)):
ps_idx = prop + '[' + str(idx) + ']'
validate_type(ps_idx, procstep, dict)
for ps_prop, ps_val in iteritems(procstep):
ps_key = '.'.join((ps_idx, ps_prop))
if ps_prop not in procstep_keys:
_validation_error(prop, None, value, ('keys: {0}'.format(','.join(procstep_keys))))
if ps_prop != 'sources':
validate_type(ps_key, ps_val, string_types)
else:
validate_type(ps_key, ps_val, (string_types, list))
for src_idx, src_val in enumerate(wrap_value(ps_val)):
src_key = ps_key + '[' + str(src_idx) + ']'
validate_type(src_key, src_val, string_types) | def function[validate_process_steps, parameter[prop, value]]:
constant[ Default validation for Process Steps data structure ]
if compare[name[value] is_not constant[None]] begin[:]
call[name[validate_type], parameter[name[prop], name[value], tuple[[<ast.Name object at 0x7da20c6abbb0>, <ast.Name object at 0x7da20c6abaf0>]]]]
variable[procstep_keys] assign[=] call[name[set], parameter[call[name[_complex_definitions]][name[prop]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6aafe0>, <ast.Name object at 0x7da20c6a81c0>]]] in starred[call[name[enumerate], parameter[call[name[wrap_value], parameter[name[value]]]]]] begin[:]
variable[ps_idx] assign[=] binary_operation[binary_operation[binary_operation[name[prop] + constant[[]] + call[name[str], parameter[name[idx]]]] + constant[]]]
call[name[validate_type], parameter[name[ps_idx], name[procstep], name[dict]]]
for taget[tuple[[<ast.Name object at 0x7da20c6ab1c0>, <ast.Name object at 0x7da20c6a8760>]]] in starred[call[name[iteritems], parameter[name[procstep]]]] begin[:]
variable[ps_key] assign[=] call[constant[.].join, parameter[tuple[[<ast.Name object at 0x7da20c6aace0>, <ast.Name object at 0x7da20c6abfd0>]]]]
if compare[name[ps_prop] <ast.NotIn object at 0x7da2590d7190> name[procstep_keys]] begin[:]
call[name[_validation_error], parameter[name[prop], constant[None], name[value], call[constant[keys: {0}].format, parameter[call[constant[,].join, parameter[name[procstep_keys]]]]]]]
if compare[name[ps_prop] not_equal[!=] constant[sources]] begin[:]
call[name[validate_type], parameter[name[ps_key], name[ps_val], name[string_types]]] | keyword[def] identifier[validate_process_steps] ( identifier[prop] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[validate_type] ( identifier[prop] , identifier[value] ,( identifier[dict] , identifier[list] ))
identifier[procstep_keys] = identifier[set] ( identifier[_complex_definitions] [ identifier[prop] ])
keyword[for] identifier[idx] , identifier[procstep] keyword[in] identifier[enumerate] ( identifier[wrap_value] ( identifier[value] )):
identifier[ps_idx] = identifier[prop] + literal[string] + identifier[str] ( identifier[idx] )+ literal[string]
identifier[validate_type] ( identifier[ps_idx] , identifier[procstep] , identifier[dict] )
keyword[for] identifier[ps_prop] , identifier[ps_val] keyword[in] identifier[iteritems] ( identifier[procstep] ):
identifier[ps_key] = literal[string] . identifier[join] (( identifier[ps_idx] , identifier[ps_prop] ))
keyword[if] identifier[ps_prop] keyword[not] keyword[in] identifier[procstep_keys] :
identifier[_validation_error] ( identifier[prop] , keyword[None] , identifier[value] ,( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[procstep_keys] ))))
keyword[if] identifier[ps_prop] != literal[string] :
identifier[validate_type] ( identifier[ps_key] , identifier[ps_val] , identifier[string_types] )
keyword[else] :
identifier[validate_type] ( identifier[ps_key] , identifier[ps_val] ,( identifier[string_types] , identifier[list] ))
keyword[for] identifier[src_idx] , identifier[src_val] keyword[in] identifier[enumerate] ( identifier[wrap_value] ( identifier[ps_val] )):
identifier[src_key] = identifier[ps_key] + literal[string] + identifier[str] ( identifier[src_idx] )+ literal[string]
identifier[validate_type] ( identifier[src_key] , identifier[src_val] , identifier[string_types] ) | def validate_process_steps(prop, value):
""" Default validation for Process Steps data structure """
if value is not None:
validate_type(prop, value, (dict, list))
procstep_keys = set(_complex_definitions[prop])
for (idx, procstep) in enumerate(wrap_value(value)):
ps_idx = prop + '[' + str(idx) + ']'
validate_type(ps_idx, procstep, dict)
for (ps_prop, ps_val) in iteritems(procstep):
ps_key = '.'.join((ps_idx, ps_prop))
if ps_prop not in procstep_keys:
_validation_error(prop, None, value, 'keys: {0}'.format(','.join(procstep_keys))) # depends on [control=['if'], data=['procstep_keys']]
if ps_prop != 'sources':
validate_type(ps_key, ps_val, string_types) # depends on [control=['if'], data=[]]
else:
validate_type(ps_key, ps_val, (string_types, list))
for (src_idx, src_val) in enumerate(wrap_value(ps_val)):
src_key = ps_key + '[' + str(src_idx) + ']'
validate_type(src_key, src_val, string_types) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['value']] |
def read_header(self):
""" Reads the head of the las file and returns it
"""
self.stream.seek(self.start_pos)
return headers.HeaderFactory().read_from_stream(self.stream) | def function[read_header, parameter[self]]:
constant[ Reads the head of the las file and returns it
]
call[name[self].stream.seek, parameter[name[self].start_pos]]
return[call[call[name[headers].HeaderFactory, parameter[]].read_from_stream, parameter[name[self].stream]]] | keyword[def] identifier[read_header] ( identifier[self] ):
literal[string]
identifier[self] . identifier[stream] . identifier[seek] ( identifier[self] . identifier[start_pos] )
keyword[return] identifier[headers] . identifier[HeaderFactory] (). identifier[read_from_stream] ( identifier[self] . identifier[stream] ) | def read_header(self):
""" Reads the head of the las file and returns it
"""
self.stream.seek(self.start_pos)
return headers.HeaderFactory().read_from_stream(self.stream) |
def do_confusion_matrix(sorting1, sorting2, unit_map12, labels_st1, labels_st2):
"""
Compute the confusion matrix between two sorting.
Parameters
----------
sorting1: SortingExtractor instance
The ground truth sorting.
sorting2: SortingExtractor instance
The tested sorting.
unit_map12: dict
Dict of matching from sorting1 to sorting2.
Output
----------
confusion_matrix: the confusion matrix
st1_idxs: order of units1 in confusion matrix
st2_idxs: order of units2 in confusion matrix
"""
unit1_ids = np.array(sorting1.get_unit_ids())
unit2_ids = np.array(sorting2.get_unit_ids())
N1 = len(unit1_ids)
N2 = len(unit2_ids)
conf_matrix = np.zeros((N1 + 1, N2 + 1), dtype=int)
mapped_units = np.array(list(unit_map12.values()))
idxs_matched, = np.where(mapped_units != -1)
idxs_unmatched, = np.where(mapped_units == -1)
unit_map_matched = mapped_units[idxs_matched]
st1_idxs =np.hstack([unit1_ids[idxs_matched], unit1_ids[idxs_unmatched]])
st2_matched = unit_map_matched
st2_unmatched = []
for u_i, u1 in enumerate(unit1_ids[idxs_matched]):
lab_st1 = labels_st1[u1]
tp = len(np.where('TP' == lab_st1)[0])
conf_matrix[u_i, u_i] = int(tp)
for u_j, u2 in enumerate(unit2_ids):
lab_st2 = labels_st2[u2]
cl_str = str(u1) + '_' + str(u2)
cl = len([i for i, v in enumerate(lab_st1) if 'CL' in v and cl_str in v])
if cl != 0:
st_p, = np.where(u2 == unit_map_matched)
conf_matrix[u_i, st_p] = int(cl)
fn = len(np.where('FN' == lab_st1)[0])
conf_matrix[u_i, -1] = int(fn)
for u_i, u1 in enumerate(unit1_ids[idxs_unmatched]):
lab_st1 = labels_st1[u1]
fn = len(np.where('FN' == lab_st1)[0])
conf_matrix[u_i + len(idxs_matched), -1] = int(fn)
for u_j, u2 in enumerate(unit2_ids):
lab_st2 = labels_st2[u2]
fp = len(np.where('FP' == lab_st2)[0])
st_p, = np.where(u2 == unit_map_matched)
if len(st_p) != 0:
conf_matrix[-1, st_p] = int(fp)
else:
st2_unmatched.append(int(u2))
conf_matrix[-1, len(idxs_matched) + len(st2_unmatched) - 1] = int(fp)
st2_idxs = np.hstack([st2_matched, st2_unmatched]).astype('int64')
return conf_matrix, st1_idxs, st2_idxs | def function[do_confusion_matrix, parameter[sorting1, sorting2, unit_map12, labels_st1, labels_st2]]:
constant[
Compute the confusion matrix between two sorting.
Parameters
----------
sorting1: SortingExtractor instance
The ground truth sorting.
sorting2: SortingExtractor instance
The tested sorting.
unit_map12: dict
Dict of matching from sorting1 to sorting2.
Output
----------
confusion_matrix: the confusion matrix
st1_idxs: order of units1 in confusion matrix
st2_idxs: order of units2 in confusion matrix
]
variable[unit1_ids] assign[=] call[name[np].array, parameter[call[name[sorting1].get_unit_ids, parameter[]]]]
variable[unit2_ids] assign[=] call[name[np].array, parameter[call[name[sorting2].get_unit_ids, parameter[]]]]
variable[N1] assign[=] call[name[len], parameter[name[unit1_ids]]]
variable[N2] assign[=] call[name[len], parameter[name[unit2_ids]]]
variable[conf_matrix] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da1b102b790>, <ast.BinOp object at 0x7da1b102b700>]]]]
variable[mapped_units] assign[=] call[name[np].array, parameter[call[name[list], parameter[call[name[unit_map12].values, parameter[]]]]]]
<ast.Tuple object at 0x7da1b102b400> assign[=] call[name[np].where, parameter[compare[name[mapped_units] not_equal[!=] <ast.UnaryOp object at 0x7da1b102b2b0>]]]
<ast.Tuple object at 0x7da1b102b220> assign[=] call[name[np].where, parameter[compare[name[mapped_units] equal[==] <ast.UnaryOp object at 0x7da1b102b0d0>]]]
variable[unit_map_matched] assign[=] call[name[mapped_units]][name[idxs_matched]]
variable[st1_idxs] assign[=] call[name[np].hstack, parameter[list[[<ast.Subscript object at 0x7da1b102ae60>, <ast.Subscript object at 0x7da1b102add0>]]]]
variable[st2_matched] assign[=] name[unit_map_matched]
variable[st2_unmatched] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b102abc0>, <ast.Name object at 0x7da1b102ab90>]]] in starred[call[name[enumerate], parameter[call[name[unit1_ids]][name[idxs_matched]]]]] begin[:]
variable[lab_st1] assign[=] call[name[labels_st1]][name[u1]]
variable[tp] assign[=] call[name[len], parameter[call[call[name[np].where, parameter[compare[constant[TP] equal[==] name[lab_st1]]]]][constant[0]]]]
call[name[conf_matrix]][tuple[[<ast.Name object at 0x7da1b102a650>, <ast.Name object at 0x7da1b102a620>]]] assign[=] call[name[int], parameter[name[tp]]]
for taget[tuple[[<ast.Name object at 0x7da1b102a500>, <ast.Name object at 0x7da1b102a4d0>]]] in starred[call[name[enumerate], parameter[name[unit2_ids]]]] begin[:]
variable[lab_st2] assign[=] call[name[labels_st2]][name[u2]]
variable[cl_str] assign[=] binary_operation[binary_operation[call[name[str], parameter[name[u1]]] + constant[_]] + call[name[str], parameter[name[u2]]]]
variable[cl] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b102a050>]]
if compare[name[cl] not_equal[!=] constant[0]] begin[:]
<ast.Tuple object at 0x7da1b1029c30> assign[=] call[name[np].where, parameter[compare[name[u2] equal[==] name[unit_map_matched]]]]
call[name[conf_matrix]][tuple[[<ast.Name object at 0x7da1b10285b0>, <ast.Name object at 0x7da1b10285e0>]]] assign[=] call[name[int], parameter[name[cl]]]
variable[fn] assign[=] call[name[len], parameter[call[call[name[np].where, parameter[compare[constant[FN] equal[==] name[lab_st1]]]]][constant[0]]]]
call[name[conf_matrix]][tuple[[<ast.Name object at 0x7da1b1029ae0>, <ast.UnaryOp object at 0x7da1b1029ab0>]]] assign[=] call[name[int], parameter[name[fn]]]
for taget[tuple[[<ast.Name object at 0x7da1b1029960>, <ast.Name object at 0x7da1b1029930>]]] in starred[call[name[enumerate], parameter[call[name[unit1_ids]][name[idxs_unmatched]]]]] begin[:]
variable[lab_st1] assign[=] call[name[labels_st1]][name[u1]]
variable[fn] assign[=] call[name[len], parameter[call[call[name[np].where, parameter[compare[constant[FN] equal[==] name[lab_st1]]]]][constant[0]]]]
call[name[conf_matrix]][tuple[[<ast.BinOp object at 0x7da1b1029420>, <ast.UnaryOp object at 0x7da1b1029330>]]] assign[=] call[name[int], parameter[name[fn]]]
for taget[tuple[[<ast.Name object at 0x7da1b10291e0>, <ast.Name object at 0x7da1b10291b0>]]] in starred[call[name[enumerate], parameter[name[unit2_ids]]]] begin[:]
variable[lab_st2] assign[=] call[name[labels_st2]][name[u2]]
variable[fp] assign[=] call[name[len], parameter[call[call[name[np].where, parameter[compare[constant[FP] equal[==] name[lab_st2]]]]][constant[0]]]]
<ast.Tuple object at 0x7da1b1028d90> assign[=] call[name[np].where, parameter[compare[name[u2] equal[==] name[unit_map_matched]]]]
if compare[call[name[len], parameter[name[st_p]]] not_equal[!=] constant[0]] begin[:]
call[name[conf_matrix]][tuple[[<ast.UnaryOp object at 0x7da1b1028a30>, <ast.Name object at 0x7da1b10289d0>]]] assign[=] call[name[int], parameter[name[fp]]]
variable[st2_idxs] assign[=] call[call[name[np].hstack, parameter[list[[<ast.Name object at 0x7da1b124e140>, <ast.Name object at 0x7da1b124c9a0>]]]].astype, parameter[constant[int64]]]
return[tuple[[<ast.Name object at 0x7da1b1115c00>, <ast.Name object at 0x7da1b1116200>, <ast.Name object at 0x7da1b11176d0>]]] | keyword[def] identifier[do_confusion_matrix] ( identifier[sorting1] , identifier[sorting2] , identifier[unit_map12] , identifier[labels_st1] , identifier[labels_st2] ):
literal[string]
identifier[unit1_ids] = identifier[np] . identifier[array] ( identifier[sorting1] . identifier[get_unit_ids] ())
identifier[unit2_ids] = identifier[np] . identifier[array] ( identifier[sorting2] . identifier[get_unit_ids] ())
identifier[N1] = identifier[len] ( identifier[unit1_ids] )
identifier[N2] = identifier[len] ( identifier[unit2_ids] )
identifier[conf_matrix] = identifier[np] . identifier[zeros] (( identifier[N1] + literal[int] , identifier[N2] + literal[int] ), identifier[dtype] = identifier[int] )
identifier[mapped_units] = identifier[np] . identifier[array] ( identifier[list] ( identifier[unit_map12] . identifier[values] ()))
identifier[idxs_matched] ,= identifier[np] . identifier[where] ( identifier[mapped_units] !=- literal[int] )
identifier[idxs_unmatched] ,= identifier[np] . identifier[where] ( identifier[mapped_units] ==- literal[int] )
identifier[unit_map_matched] = identifier[mapped_units] [ identifier[idxs_matched] ]
identifier[st1_idxs] = identifier[np] . identifier[hstack] ([ identifier[unit1_ids] [ identifier[idxs_matched] ], identifier[unit1_ids] [ identifier[idxs_unmatched] ]])
identifier[st2_matched] = identifier[unit_map_matched]
identifier[st2_unmatched] =[]
keyword[for] identifier[u_i] , identifier[u1] keyword[in] identifier[enumerate] ( identifier[unit1_ids] [ identifier[idxs_matched] ]):
identifier[lab_st1] = identifier[labels_st1] [ identifier[u1] ]
identifier[tp] = identifier[len] ( identifier[np] . identifier[where] ( literal[string] == identifier[lab_st1] )[ literal[int] ])
identifier[conf_matrix] [ identifier[u_i] , identifier[u_i] ]= identifier[int] ( identifier[tp] )
keyword[for] identifier[u_j] , identifier[u2] keyword[in] identifier[enumerate] ( identifier[unit2_ids] ):
identifier[lab_st2] = identifier[labels_st2] [ identifier[u2] ]
identifier[cl_str] = identifier[str] ( identifier[u1] )+ literal[string] + identifier[str] ( identifier[u2] )
identifier[cl] = identifier[len] ([ identifier[i] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[lab_st1] ) keyword[if] literal[string] keyword[in] identifier[v] keyword[and] identifier[cl_str] keyword[in] identifier[v] ])
keyword[if] identifier[cl] != literal[int] :
identifier[st_p] ,= identifier[np] . identifier[where] ( identifier[u2] == identifier[unit_map_matched] )
identifier[conf_matrix] [ identifier[u_i] , identifier[st_p] ]= identifier[int] ( identifier[cl] )
identifier[fn] = identifier[len] ( identifier[np] . identifier[where] ( literal[string] == identifier[lab_st1] )[ literal[int] ])
identifier[conf_matrix] [ identifier[u_i] ,- literal[int] ]= identifier[int] ( identifier[fn] )
keyword[for] identifier[u_i] , identifier[u1] keyword[in] identifier[enumerate] ( identifier[unit1_ids] [ identifier[idxs_unmatched] ]):
identifier[lab_st1] = identifier[labels_st1] [ identifier[u1] ]
identifier[fn] = identifier[len] ( identifier[np] . identifier[where] ( literal[string] == identifier[lab_st1] )[ literal[int] ])
identifier[conf_matrix] [ identifier[u_i] + identifier[len] ( identifier[idxs_matched] ),- literal[int] ]= identifier[int] ( identifier[fn] )
keyword[for] identifier[u_j] , identifier[u2] keyword[in] identifier[enumerate] ( identifier[unit2_ids] ):
identifier[lab_st2] = identifier[labels_st2] [ identifier[u2] ]
identifier[fp] = identifier[len] ( identifier[np] . identifier[where] ( literal[string] == identifier[lab_st2] )[ literal[int] ])
identifier[st_p] ,= identifier[np] . identifier[where] ( identifier[u2] == identifier[unit_map_matched] )
keyword[if] identifier[len] ( identifier[st_p] )!= literal[int] :
identifier[conf_matrix] [- literal[int] , identifier[st_p] ]= identifier[int] ( identifier[fp] )
keyword[else] :
identifier[st2_unmatched] . identifier[append] ( identifier[int] ( identifier[u2] ))
identifier[conf_matrix] [- literal[int] , identifier[len] ( identifier[idxs_matched] )+ identifier[len] ( identifier[st2_unmatched] )- literal[int] ]= identifier[int] ( identifier[fp] )
identifier[st2_idxs] = identifier[np] . identifier[hstack] ([ identifier[st2_matched] , identifier[st2_unmatched] ]). identifier[astype] ( literal[string] )
keyword[return] identifier[conf_matrix] , identifier[st1_idxs] , identifier[st2_idxs] | def do_confusion_matrix(sorting1, sorting2, unit_map12, labels_st1, labels_st2):
"""
Compute the confusion matrix between two sorting.
Parameters
----------
sorting1: SortingExtractor instance
The ground truth sorting.
sorting2: SortingExtractor instance
The tested sorting.
unit_map12: dict
Dict of matching from sorting1 to sorting2.
Output
----------
confusion_matrix: the confusion matrix
st1_idxs: order of units1 in confusion matrix
st2_idxs: order of units2 in confusion matrix
"""
unit1_ids = np.array(sorting1.get_unit_ids())
unit2_ids = np.array(sorting2.get_unit_ids())
N1 = len(unit1_ids)
N2 = len(unit2_ids)
conf_matrix = np.zeros((N1 + 1, N2 + 1), dtype=int)
mapped_units = np.array(list(unit_map12.values()))
(idxs_matched,) = np.where(mapped_units != -1)
(idxs_unmatched,) = np.where(mapped_units == -1)
unit_map_matched = mapped_units[idxs_matched]
st1_idxs = np.hstack([unit1_ids[idxs_matched], unit1_ids[idxs_unmatched]])
st2_matched = unit_map_matched
st2_unmatched = []
for (u_i, u1) in enumerate(unit1_ids[idxs_matched]):
lab_st1 = labels_st1[u1]
tp = len(np.where('TP' == lab_st1)[0])
conf_matrix[u_i, u_i] = int(tp)
for (u_j, u2) in enumerate(unit2_ids):
lab_st2 = labels_st2[u2]
cl_str = str(u1) + '_' + str(u2)
cl = len([i for (i, v) in enumerate(lab_st1) if 'CL' in v and cl_str in v])
if cl != 0:
(st_p,) = np.where(u2 == unit_map_matched)
conf_matrix[u_i, st_p] = int(cl) # depends on [control=['if'], data=['cl']] # depends on [control=['for'], data=[]]
fn = len(np.where('FN' == lab_st1)[0])
conf_matrix[u_i, -1] = int(fn) # depends on [control=['for'], data=[]]
for (u_i, u1) in enumerate(unit1_ids[idxs_unmatched]):
lab_st1 = labels_st1[u1]
fn = len(np.where('FN' == lab_st1)[0])
conf_matrix[u_i + len(idxs_matched), -1] = int(fn) # depends on [control=['for'], data=[]]
for (u_j, u2) in enumerate(unit2_ids):
lab_st2 = labels_st2[u2]
fp = len(np.where('FP' == lab_st2)[0])
(st_p,) = np.where(u2 == unit_map_matched)
if len(st_p) != 0:
conf_matrix[-1, st_p] = int(fp) # depends on [control=['if'], data=[]]
else:
st2_unmatched.append(int(u2))
conf_matrix[-1, len(idxs_matched) + len(st2_unmatched) - 1] = int(fp) # depends on [control=['for'], data=[]]
st2_idxs = np.hstack([st2_matched, st2_unmatched]).astype('int64')
return (conf_matrix, st1_idxs, st2_idxs) |
def _revoke(self, http):
"""Revokes this credential and deletes the stored copy (if it exists).
Args:
http: an object to be used to make HTTP requests.
"""
self._do_revoke(http, self.refresh_token or self.access_token) | def function[_revoke, parameter[self, http]]:
constant[Revokes this credential and deletes the stored copy (if it exists).
Args:
http: an object to be used to make HTTP requests.
]
call[name[self]._do_revoke, parameter[name[http], <ast.BoolOp object at 0x7da1b01fba00>]] | keyword[def] identifier[_revoke] ( identifier[self] , identifier[http] ):
literal[string]
identifier[self] . identifier[_do_revoke] ( identifier[http] , identifier[self] . identifier[refresh_token] keyword[or] identifier[self] . identifier[access_token] ) | def _revoke(self, http):
"""Revokes this credential and deletes the stored copy (if it exists).
Args:
http: an object to be used to make HTTP requests.
"""
self._do_revoke(http, self.refresh_token or self.access_token) |
def set_font(self, font, option):
"""Set global font used in Spyder."""
# Update fonts in all plugins
set_font(font, option=option)
plugins = self.main.widgetlist + self.main.thirdparty_plugins
for plugin in plugins:
plugin.update_font() | def function[set_font, parameter[self, font, option]]:
constant[Set global font used in Spyder.]
call[name[set_font], parameter[name[font]]]
variable[plugins] assign[=] binary_operation[name[self].main.widgetlist + name[self].main.thirdparty_plugins]
for taget[name[plugin]] in starred[name[plugins]] begin[:]
call[name[plugin].update_font, parameter[]] | keyword[def] identifier[set_font] ( identifier[self] , identifier[font] , identifier[option] ):
literal[string]
identifier[set_font] ( identifier[font] , identifier[option] = identifier[option] )
identifier[plugins] = identifier[self] . identifier[main] . identifier[widgetlist] + identifier[self] . identifier[main] . identifier[thirdparty_plugins]
keyword[for] identifier[plugin] keyword[in] identifier[plugins] :
identifier[plugin] . identifier[update_font] () | def set_font(self, font, option):
"""Set global font used in Spyder."""
# Update fonts in all plugins
set_font(font, option=option)
plugins = self.main.widgetlist + self.main.thirdparty_plugins
for plugin in plugins:
plugin.update_font() # depends on [control=['for'], data=['plugin']] |
def _get_brew_tap_specific_commands(brew_path_prefix):
"""To get tap's specific commands
https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115"""
commands = []
brew_taps_path = brew_path_prefix + TAP_PATH
for user in _get_directory_names_only(brew_taps_path):
taps = _get_directory_names_only(brew_taps_path + '/%s' % user)
# Brew Taps's naming rule
# https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations
taps = (tap for tap in taps if tap.startswith('homebrew-'))
for tap in taps:
tap_cmd_path = brew_taps_path + TAP_CMD_PATH % (user, tap)
if os.path.isdir(tap_cmd_path):
commands += (name.replace('brew-', '').replace('.rb', '')
for name in os.listdir(tap_cmd_path)
if _is_brew_tap_cmd_naming(name))
return commands | def function[_get_brew_tap_specific_commands, parameter[brew_path_prefix]]:
constant[To get tap's specific commands
https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115]
variable[commands] assign[=] list[[]]
variable[brew_taps_path] assign[=] binary_operation[name[brew_path_prefix] + name[TAP_PATH]]
for taget[name[user]] in starred[call[name[_get_directory_names_only], parameter[name[brew_taps_path]]]] begin[:]
variable[taps] assign[=] call[name[_get_directory_names_only], parameter[binary_operation[name[brew_taps_path] + binary_operation[constant[/%s] <ast.Mod object at 0x7da2590d6920> name[user]]]]]
variable[taps] assign[=] <ast.GeneratorExp object at 0x7da1b1e998a0>
for taget[name[tap]] in starred[name[taps]] begin[:]
variable[tap_cmd_path] assign[=] binary_operation[name[brew_taps_path] + binary_operation[name[TAP_CMD_PATH] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1e9a6e0>, <ast.Name object at 0x7da1b1e9b1c0>]]]]
if call[name[os].path.isdir, parameter[name[tap_cmd_path]]] begin[:]
<ast.AugAssign object at 0x7da1b1e9ae00>
return[name[commands]] | keyword[def] identifier[_get_brew_tap_specific_commands] ( identifier[brew_path_prefix] ):
literal[string]
identifier[commands] =[]
identifier[brew_taps_path] = identifier[brew_path_prefix] + identifier[TAP_PATH]
keyword[for] identifier[user] keyword[in] identifier[_get_directory_names_only] ( identifier[brew_taps_path] ):
identifier[taps] = identifier[_get_directory_names_only] ( identifier[brew_taps_path] + literal[string] % identifier[user] )
identifier[taps] =( identifier[tap] keyword[for] identifier[tap] keyword[in] identifier[taps] keyword[if] identifier[tap] . identifier[startswith] ( literal[string] ))
keyword[for] identifier[tap] keyword[in] identifier[taps] :
identifier[tap_cmd_path] = identifier[brew_taps_path] + identifier[TAP_CMD_PATH] %( identifier[user] , identifier[tap] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[tap_cmd_path] ):
identifier[commands] +=( identifier[name] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[for] identifier[name] keyword[in] identifier[os] . identifier[listdir] ( identifier[tap_cmd_path] )
keyword[if] identifier[_is_brew_tap_cmd_naming] ( identifier[name] ))
keyword[return] identifier[commands] | def _get_brew_tap_specific_commands(brew_path_prefix):
"""To get tap's specific commands
https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115"""
commands = []
brew_taps_path = brew_path_prefix + TAP_PATH
for user in _get_directory_names_only(brew_taps_path):
taps = _get_directory_names_only(brew_taps_path + '/%s' % user)
# Brew Taps's naming rule
# https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations
taps = (tap for tap in taps if tap.startswith('homebrew-'))
for tap in taps:
tap_cmd_path = brew_taps_path + TAP_CMD_PATH % (user, tap)
if os.path.isdir(tap_cmd_path):
commands += (name.replace('brew-', '').replace('.rb', '') for name in os.listdir(tap_cmd_path) if _is_brew_tap_cmd_naming(name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tap']] # depends on [control=['for'], data=['user']]
return commands |
def ner_net(source, destinations, width, height, wrap_around=False, radius=10):
"""Produce a shortest path tree for a given net using NER.
This is the kernel of the NER algorithm.
Parameters
----------
source : (x, y)
The coordinate of the source vertex.
destinations : iterable([(x, y), ...])
The coordinates of destination vertices.
width : int
Width of the system (nodes)
height : int
Height of the system (nodes)
wrap_around : bool
True if wrap-around links should be used, false if they should be
avoided.
radius : int
Radius of area to search from each node. 20 is arbitrarily selected in
the paper and shown to be acceptable in practice.
Returns
-------
(:py:class:`~.rig.place_and_route.routing_tree.RoutingTree`,
{(x,y): :py:class:`~.rig.place_and_route.routing_tree.RoutingTree`, ...})
A RoutingTree is produced rooted at the source and visiting all
destinations but which does not contain any vertices etc. For
convenience, a dictionarry mapping from destination (x, y) coordinates
to the associated RoutingTree is provided to allow the caller to insert
these items.
"""
# Map from (x, y) to RoutingTree objects
route = {source: RoutingTree(source)}
# Handle each destination, sorted by distance from the source, closest
# first.
for destination in sorted(destinations,
key=(lambda destination:
shortest_mesh_path_length(
to_xyz(source), to_xyz(destination))
if not wrap_around else
shortest_torus_path_length(
to_xyz(source), to_xyz(destination),
width, height))):
# We shall attempt to find our nearest neighbouring placed node.
neighbour = None
# Try to find a nearby (within radius hops) node in the routing tree
# that we can route to (falling back on just routing to the source).
#
# In an implementation according to the algorithm's original
# specification looks for nodes at each point in a growing set of rings
# of concentric hexagons. If it doesn't find any destinations this
# means an awful lot of checks: 1261 for the default radius of 20.
#
# An alternative (but behaviourally identical) implementation scans the
# list of all route nodes created so far and finds the closest node
# which is < radius hops (falling back on the origin if no node is
# closer than radius hops). This implementation requires one check per
# existing route node. In most routes this is probably a lot less than
# 1261 since most routes will probably have at most a few hundred route
# nodes by the time the last destination is being routed.
#
# Which implementation is best is a difficult question to answer:
# * In principle nets with quite localised connections (e.g.
# nearest-neighbour or centroids traffic) may route slightly more
# quickly with the original algorithm since it may very quickly find
# a neighbour.
# * In nets which connect very spaced-out destinations the second
# implementation may be quicker since in such a scenario it is
# unlikely that a neighbour will be found.
# * In extremely high-fan-out nets (e.g. broadcasts), the original
# method is very likely to perform *far* better than the alternative
# method since most iterations will complete immediately while the
# alternative method must scan *all* the route vertices.
# As such, it should be clear that neither method alone is 'best' and
# both have degenerate performance in certain completely reasonable
# styles of net. As a result, a simple heuristic is used to decide
# which technique to use.
#
# The following micro-benchmarks are crude estimate of the
# runtime-per-iteration of each approach (at least in the case of a
# torus topology)::
#
# $ # Original approach
# $ python -m timeit --setup 'x, y, w, h, r = 1, 2, 5, 10, \
# {x:None for x in range(10)}' \
# 'x += 1; y += 1; x %= w; y %= h; (x, y) in r'
# 1000000 loops, best of 3: 0.207 usec per loop
# $ # Alternative approach
# $ python -m timeit --setup 'from rig.geometry import \
# shortest_torus_path_length' \
# 'shortest_torus_path_length( \
# (0, 1, 2), (3, 2, 1), 10, 10)'
# 1000000 loops, best of 3: 0.666 usec per loop
#
# From this we can approximately suggest that the alternative approach
# is 3x more expensive per iteration. A very crude heuristic is to use
# the original approach when the number of route nodes is more than
# 1/3rd of the number of routes checked by the original method.
concentric_hexagons = memoized_concentric_hexagons(radius)
if len(concentric_hexagons) < len(route) / 3:
# Original approach: Start looking for route nodes in a concentric
# spiral pattern out from the destination node.
for x, y in concentric_hexagons:
x += destination[0]
y += destination[1]
if wrap_around:
x %= width
y %= height
if (x, y) in route:
neighbour = (x, y)
break
else:
# Alternative approach: Scan over every route node and check to see
# if any are < radius, picking the closest one if so.
neighbour = None
neighbour_distance = None
for candidate_neighbour in route:
if wrap_around:
distance = shortest_torus_path_length(
to_xyz(candidate_neighbour), to_xyz(destination),
width, height)
else:
distance = shortest_mesh_path_length(
to_xyz(candidate_neighbour), to_xyz(destination))
if distance <= radius and (neighbour is None or
distance < neighbour_distance):
neighbour = candidate_neighbour
neighbour_distance = distance
# Fall back on routing directly to the source if no nodes within radius
# hops of the destination was found.
if neighbour is None:
neighbour = source
# Find the shortest vector from the neighbour to this destination
if wrap_around:
vector = shortest_torus_path(to_xyz(neighbour),
to_xyz(destination),
width, height)
else:
vector = shortest_mesh_path(to_xyz(neighbour), to_xyz(destination))
# The longest-dimension-first route may inadvertently pass through an
# already connected node. If the route is allowed to pass through that
# node it would create a cycle in the route which would be VeryBad(TM).
# As a result, we work backward through the route and truncate it at
# the first point where the route intersects with a connected node.
ldf = longest_dimension_first(vector, neighbour, width, height)
i = len(ldf)
for direction, (x, y) in reversed(ldf):
i -= 1
if (x, y) in route:
# We've just bumped into a node which is already part of the
# route, this becomes our new neighbour and we truncate the LDF
# route. (Note ldf list is truncated just after the current
# position since it gives (direction, destination) pairs).
neighbour = (x, y)
ldf = ldf[i + 1:]
break
# Take the longest dimension first route.
last_node = route[neighbour]
for direction, (x, y) in ldf:
this_node = RoutingTree((x, y))
route[(x, y)] = this_node
last_node.children.append((Routes(direction), this_node))
last_node = this_node
return (route[source], route) | def function[ner_net, parameter[source, destinations, width, height, wrap_around, radius]]:
constant[Produce a shortest path tree for a given net using NER.
This is the kernel of the NER algorithm.
Parameters
----------
source : (x, y)
The coordinate of the source vertex.
destinations : iterable([(x, y), ...])
The coordinates of destination vertices.
width : int
Width of the system (nodes)
height : int
Height of the system (nodes)
wrap_around : bool
True if wrap-around links should be used, false if they should be
avoided.
radius : int
Radius of area to search from each node. 20 is arbitrarily selected in
the paper and shown to be acceptable in practice.
Returns
-------
(:py:class:`~.rig.place_and_route.routing_tree.RoutingTree`,
{(x,y): :py:class:`~.rig.place_and_route.routing_tree.RoutingTree`, ...})
A RoutingTree is produced rooted at the source and visiting all
destinations but which does not contain any vertices etc. For
convenience, a dictionarry mapping from destination (x, y) coordinates
to the associated RoutingTree is provided to allow the caller to insert
these items.
]
variable[route] assign[=] dictionary[[<ast.Name object at 0x7da1b1814be0>], [<ast.Call object at 0x7da1b1814c40>]]
for taget[name[destination]] in starred[call[name[sorted], parameter[name[destinations]]]] begin[:]
variable[neighbour] assign[=] constant[None]
variable[concentric_hexagons] assign[=] call[name[memoized_concentric_hexagons], parameter[name[radius]]]
if compare[call[name[len], parameter[name[concentric_hexagons]]] less[<] binary_operation[call[name[len], parameter[name[route]]] / constant[3]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b18173d0>, <ast.Name object at 0x7da1b18173a0>]]] in starred[name[concentric_hexagons]] begin[:]
<ast.AugAssign object at 0x7da1b1817370>
<ast.AugAssign object at 0x7da1b1816f80>
if name[wrap_around] begin[:]
<ast.AugAssign object at 0x7da1b1817220>
<ast.AugAssign object at 0x7da1b1817100>
if compare[tuple[[<ast.Name object at 0x7da1b1816fe0>, <ast.Name object at 0x7da1b1817310>]] in name[route]] begin[:]
variable[neighbour] assign[=] tuple[[<ast.Name object at 0x7da1b1815780>, <ast.Name object at 0x7da1b18158a0>]]
break
if compare[name[neighbour] is constant[None]] begin[:]
variable[neighbour] assign[=] name[source]
if name[wrap_around] begin[:]
variable[vector] assign[=] call[name[shortest_torus_path], parameter[call[name[to_xyz], parameter[name[neighbour]]], call[name[to_xyz], parameter[name[destination]]], name[width], name[height]]]
variable[ldf] assign[=] call[name[longest_dimension_first], parameter[name[vector], name[neighbour], name[width], name[height]]]
variable[i] assign[=] call[name[len], parameter[name[ldf]]]
for taget[tuple[[<ast.Name object at 0x7da1b1971f00>, <ast.Tuple object at 0x7da1b1971fc0>]]] in starred[call[name[reversed], parameter[name[ldf]]]] begin[:]
<ast.AugAssign object at 0x7da1b1970e20>
if compare[tuple[[<ast.Name object at 0x7da1b19709a0>, <ast.Name object at 0x7da1b19702e0>]] in name[route]] begin[:]
variable[neighbour] assign[=] tuple[[<ast.Name object at 0x7da1b1971960>, <ast.Name object at 0x7da1b19710c0>]]
variable[ldf] assign[=] call[name[ldf]][<ast.Slice object at 0x7da1b1972680>]
break
variable[last_node] assign[=] call[name[route]][name[neighbour]]
for taget[tuple[[<ast.Name object at 0x7da1b19724d0>, <ast.Tuple object at 0x7da1b1971b40>]]] in starred[name[ldf]] begin[:]
variable[this_node] assign[=] call[name[RoutingTree], parameter[tuple[[<ast.Name object at 0x7da1b1971ea0>, <ast.Name object at 0x7da1b1970f70>]]]]
call[name[route]][tuple[[<ast.Name object at 0x7da1b19711e0>, <ast.Name object at 0x7da1b1971990>]]] assign[=] name[this_node]
call[name[last_node].children.append, parameter[tuple[[<ast.Call object at 0x7da1b1970e50>, <ast.Name object at 0x7da1b1971720>]]]]
variable[last_node] assign[=] name[this_node]
return[tuple[[<ast.Subscript object at 0x7da1b1970b80>, <ast.Name object at 0x7da1b1970af0>]]] | keyword[def] identifier[ner_net] ( identifier[source] , identifier[destinations] , identifier[width] , identifier[height] , identifier[wrap_around] = keyword[False] , identifier[radius] = literal[int] ):
literal[string]
identifier[route] ={ identifier[source] : identifier[RoutingTree] ( identifier[source] )}
keyword[for] identifier[destination] keyword[in] identifier[sorted] ( identifier[destinations] ,
identifier[key] =( keyword[lambda] identifier[destination] :
identifier[shortest_mesh_path_length] (
identifier[to_xyz] ( identifier[source] ), identifier[to_xyz] ( identifier[destination] ))
keyword[if] keyword[not] identifier[wrap_around] keyword[else]
identifier[shortest_torus_path_length] (
identifier[to_xyz] ( identifier[source] ), identifier[to_xyz] ( identifier[destination] ),
identifier[width] , identifier[height] ))):
identifier[neighbour] = keyword[None]
identifier[concentric_hexagons] = identifier[memoized_concentric_hexagons] ( identifier[radius] )
keyword[if] identifier[len] ( identifier[concentric_hexagons] )< identifier[len] ( identifier[route] )/ literal[int] :
keyword[for] identifier[x] , identifier[y] keyword[in] identifier[concentric_hexagons] :
identifier[x] += identifier[destination] [ literal[int] ]
identifier[y] += identifier[destination] [ literal[int] ]
keyword[if] identifier[wrap_around] :
identifier[x] %= identifier[width]
identifier[y] %= identifier[height]
keyword[if] ( identifier[x] , identifier[y] ) keyword[in] identifier[route] :
identifier[neighbour] =( identifier[x] , identifier[y] )
keyword[break]
keyword[else] :
identifier[neighbour] = keyword[None]
identifier[neighbour_distance] = keyword[None]
keyword[for] identifier[candidate_neighbour] keyword[in] identifier[route] :
keyword[if] identifier[wrap_around] :
identifier[distance] = identifier[shortest_torus_path_length] (
identifier[to_xyz] ( identifier[candidate_neighbour] ), identifier[to_xyz] ( identifier[destination] ),
identifier[width] , identifier[height] )
keyword[else] :
identifier[distance] = identifier[shortest_mesh_path_length] (
identifier[to_xyz] ( identifier[candidate_neighbour] ), identifier[to_xyz] ( identifier[destination] ))
keyword[if] identifier[distance] <= identifier[radius] keyword[and] ( identifier[neighbour] keyword[is] keyword[None] keyword[or]
identifier[distance] < identifier[neighbour_distance] ):
identifier[neighbour] = identifier[candidate_neighbour]
identifier[neighbour_distance] = identifier[distance]
keyword[if] identifier[neighbour] keyword[is] keyword[None] :
identifier[neighbour] = identifier[source]
keyword[if] identifier[wrap_around] :
identifier[vector] = identifier[shortest_torus_path] ( identifier[to_xyz] ( identifier[neighbour] ),
identifier[to_xyz] ( identifier[destination] ),
identifier[width] , identifier[height] )
keyword[else] :
identifier[vector] = identifier[shortest_mesh_path] ( identifier[to_xyz] ( identifier[neighbour] ), identifier[to_xyz] ( identifier[destination] ))
identifier[ldf] = identifier[longest_dimension_first] ( identifier[vector] , identifier[neighbour] , identifier[width] , identifier[height] )
identifier[i] = identifier[len] ( identifier[ldf] )
keyword[for] identifier[direction] ,( identifier[x] , identifier[y] ) keyword[in] identifier[reversed] ( identifier[ldf] ):
identifier[i] -= literal[int]
keyword[if] ( identifier[x] , identifier[y] ) keyword[in] identifier[route] :
identifier[neighbour] =( identifier[x] , identifier[y] )
identifier[ldf] = identifier[ldf] [ identifier[i] + literal[int] :]
keyword[break]
identifier[last_node] = identifier[route] [ identifier[neighbour] ]
keyword[for] identifier[direction] ,( identifier[x] , identifier[y] ) keyword[in] identifier[ldf] :
identifier[this_node] = identifier[RoutingTree] (( identifier[x] , identifier[y] ))
identifier[route] [( identifier[x] , identifier[y] )]= identifier[this_node]
identifier[last_node] . identifier[children] . identifier[append] (( identifier[Routes] ( identifier[direction] ), identifier[this_node] ))
identifier[last_node] = identifier[this_node]
keyword[return] ( identifier[route] [ identifier[source] ], identifier[route] ) | def ner_net(source, destinations, width, height, wrap_around=False, radius=10):
"""Produce a shortest path tree for a given net using NER.
This is the kernel of the NER algorithm.
Parameters
----------
source : (x, y)
The coordinate of the source vertex.
destinations : iterable([(x, y), ...])
The coordinates of destination vertices.
width : int
Width of the system (nodes)
height : int
Height of the system (nodes)
wrap_around : bool
True if wrap-around links should be used, false if they should be
avoided.
radius : int
Radius of area to search from each node. 20 is arbitrarily selected in
the paper and shown to be acceptable in practice.
Returns
-------
(:py:class:`~.rig.place_and_route.routing_tree.RoutingTree`,
{(x,y): :py:class:`~.rig.place_and_route.routing_tree.RoutingTree`, ...})
A RoutingTree is produced rooted at the source and visiting all
destinations but which does not contain any vertices etc. For
convenience, a dictionarry mapping from destination (x, y) coordinates
to the associated RoutingTree is provided to allow the caller to insert
these items.
"""
# Map from (x, y) to RoutingTree objects
route = {source: RoutingTree(source)}
# Handle each destination, sorted by distance from the source, closest
# first.
for destination in sorted(destinations, key=lambda destination: shortest_mesh_path_length(to_xyz(source), to_xyz(destination)) if not wrap_around else shortest_torus_path_length(to_xyz(source), to_xyz(destination), width, height)):
# We shall attempt to find our nearest neighbouring placed node.
neighbour = None
# Try to find a nearby (within radius hops) node in the routing tree
# that we can route to (falling back on just routing to the source).
#
# In an implementation according to the algorithm's original
# specification looks for nodes at each point in a growing set of rings
# of concentric hexagons. If it doesn't find any destinations this
# means an awful lot of checks: 1261 for the default radius of 20.
#
# An alternative (but behaviourally identical) implementation scans the
# list of all route nodes created so far and finds the closest node
# which is < radius hops (falling back on the origin if no node is
# closer than radius hops). This implementation requires one check per
# existing route node. In most routes this is probably a lot less than
# 1261 since most routes will probably have at most a few hundred route
# nodes by the time the last destination is being routed.
#
# Which implementation is best is a difficult question to answer:
# * In principle nets with quite localised connections (e.g.
# nearest-neighbour or centroids traffic) may route slightly more
# quickly with the original algorithm since it may very quickly find
# a neighbour.
# * In nets which connect very spaced-out destinations the second
# implementation may be quicker since in such a scenario it is
# unlikely that a neighbour will be found.
# * In extremely high-fan-out nets (e.g. broadcasts), the original
# method is very likely to perform *far* better than the alternative
# method since most iterations will complete immediately while the
# alternative method must scan *all* the route vertices.
# As such, it should be clear that neither method alone is 'best' and
# both have degenerate performance in certain completely reasonable
# styles of net. As a result, a simple heuristic is used to decide
# which technique to use.
#
# The following micro-benchmarks are crude estimate of the
# runtime-per-iteration of each approach (at least in the case of a
# torus topology)::
#
# $ # Original approach
# $ python -m timeit --setup 'x, y, w, h, r = 1, 2, 5, 10, \
# {x:None for x in range(10)}' \
# 'x += 1; y += 1; x %= w; y %= h; (x, y) in r'
# 1000000 loops, best of 3: 0.207 usec per loop
# $ # Alternative approach
# $ python -m timeit --setup 'from rig.geometry import \
# shortest_torus_path_length' \
# 'shortest_torus_path_length( \
# (0, 1, 2), (3, 2, 1), 10, 10)'
# 1000000 loops, best of 3: 0.666 usec per loop
#
# From this we can approximately suggest that the alternative approach
# is 3x more expensive per iteration. A very crude heuristic is to use
# the original approach when the number of route nodes is more than
# 1/3rd of the number of routes checked by the original method.
concentric_hexagons = memoized_concentric_hexagons(radius)
if len(concentric_hexagons) < len(route) / 3:
# Original approach: Start looking for route nodes in a concentric
# spiral pattern out from the destination node.
for (x, y) in concentric_hexagons:
x += destination[0]
y += destination[1]
if wrap_around:
x %= width
y %= height # depends on [control=['if'], data=[]]
if (x, y) in route:
neighbour = (x, y)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
# Alternative approach: Scan over every route node and check to see
# if any are < radius, picking the closest one if so.
neighbour = None
neighbour_distance = None
for candidate_neighbour in route:
if wrap_around:
distance = shortest_torus_path_length(to_xyz(candidate_neighbour), to_xyz(destination), width, height) # depends on [control=['if'], data=[]]
else:
distance = shortest_mesh_path_length(to_xyz(candidate_neighbour), to_xyz(destination))
if distance <= radius and (neighbour is None or distance < neighbour_distance):
neighbour = candidate_neighbour
neighbour_distance = distance # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['candidate_neighbour']]
# Fall back on routing directly to the source if no nodes within radius
# hops of the destination was found.
if neighbour is None:
neighbour = source # depends on [control=['if'], data=['neighbour']]
# Find the shortest vector from the neighbour to this destination
if wrap_around:
vector = shortest_torus_path(to_xyz(neighbour), to_xyz(destination), width, height) # depends on [control=['if'], data=[]]
else:
vector = shortest_mesh_path(to_xyz(neighbour), to_xyz(destination))
# The longest-dimension-first route may inadvertently pass through an
# already connected node. If the route is allowed to pass through that
# node it would create a cycle in the route which would be VeryBad(TM).
# As a result, we work backward through the route and truncate it at
# the first point where the route intersects with a connected node.
ldf = longest_dimension_first(vector, neighbour, width, height)
i = len(ldf)
for (direction, (x, y)) in reversed(ldf):
i -= 1
if (x, y) in route:
# We've just bumped into a node which is already part of the
# route, this becomes our new neighbour and we truncate the LDF
# route. (Note ldf list is truncated just after the current
# position since it gives (direction, destination) pairs).
neighbour = (x, y)
ldf = ldf[i + 1:]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Take the longest dimension first route.
last_node = route[neighbour]
for (direction, (x, y)) in ldf:
this_node = RoutingTree((x, y))
route[x, y] = this_node
last_node.children.append((Routes(direction), this_node))
last_node = this_node # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['destination']]
return (route[source], route) |
def cli(env, identifier):
"""List NAS account credentials."""
nw_mgr = SoftLayer.NetworkManager(env.client)
result = nw_mgr.get_nas_credentials(identifier)
table = formatting.Table(['username', 'password'])
table.add_row([result.get('username', 'None'),
result.get('password', 'None')])
env.fout(table) | def function[cli, parameter[env, identifier]]:
constant[List NAS account credentials.]
variable[nw_mgr] assign[=] call[name[SoftLayer].NetworkManager, parameter[name[env].client]]
variable[result] assign[=] call[name[nw_mgr].get_nas_credentials, parameter[name[identifier]]]
variable[table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da207f99ed0>, <ast.Constant object at 0x7da207f9a9e0>]]]]
call[name[table].add_row, parameter[list[[<ast.Call object at 0x7da20c7c9960>, <ast.Call object at 0x7da20c7ca1a0>]]]]
call[name[env].fout, parameter[name[table]]] | keyword[def] identifier[cli] ( identifier[env] , identifier[identifier] ):
literal[string]
identifier[nw_mgr] = identifier[SoftLayer] . identifier[NetworkManager] ( identifier[env] . identifier[client] )
identifier[result] = identifier[nw_mgr] . identifier[get_nas_credentials] ( identifier[identifier] )
identifier[table] = identifier[formatting] . identifier[Table] ([ literal[string] , literal[string] ])
identifier[table] . identifier[add_row] ([ identifier[result] . identifier[get] ( literal[string] , literal[string] ),
identifier[result] . identifier[get] ( literal[string] , literal[string] )])
identifier[env] . identifier[fout] ( identifier[table] ) | def cli(env, identifier):
"""List NAS account credentials."""
nw_mgr = SoftLayer.NetworkManager(env.client)
result = nw_mgr.get_nas_credentials(identifier)
table = formatting.Table(['username', 'password'])
table.add_row([result.get('username', 'None'), result.get('password', 'None')])
env.fout(table) |
def format_system_message(errno):
"""
Call FormatMessage with a system error number to retrieve
the descriptive error message.
"""
# first some flags used by FormatMessageW
ALLOCATE_BUFFER = 0x100
ARGUMENT_ARRAY = 0x2000
FROM_HMODULE = 0x800
FROM_STRING = 0x400
FROM_SYSTEM = 0x1000
IGNORE_INSERTS = 0x200
# Let FormatMessageW allocate the buffer (we'll free it below)
# Also, let it know we want a system error message.
flags = ALLOCATE_BUFFER | FROM_SYSTEM
source = None
message_id = errno
language_id = 0
result_buffer = ctypes.wintypes.LPWSTR()
buffer_size = 0
arguments = None
format_bytes = ctypes.windll.kernel32.FormatMessageW(
flags,
source,
message_id,
language_id,
ctypes.byref(result_buffer),
buffer_size,
arguments,
)
# note the following will cause an infinite loop if GetLastError
# repeatedly returns an error that cannot be formatted, although
# this should not happen.
handle_nonzero_success(format_bytes)
message = result_buffer.value
ctypes.windll.kernel32.LocalFree(result_buffer)
return message | def function[format_system_message, parameter[errno]]:
constant[
Call FormatMessage with a system error number to retrieve
the descriptive error message.
]
variable[ALLOCATE_BUFFER] assign[=] constant[256]
variable[ARGUMENT_ARRAY] assign[=] constant[8192]
variable[FROM_HMODULE] assign[=] constant[2048]
variable[FROM_STRING] assign[=] constant[1024]
variable[FROM_SYSTEM] assign[=] constant[4096]
variable[IGNORE_INSERTS] assign[=] constant[512]
variable[flags] assign[=] binary_operation[name[ALLOCATE_BUFFER] <ast.BitOr object at 0x7da2590d6aa0> name[FROM_SYSTEM]]
variable[source] assign[=] constant[None]
variable[message_id] assign[=] name[errno]
variable[language_id] assign[=] constant[0]
variable[result_buffer] assign[=] call[name[ctypes].wintypes.LPWSTR, parameter[]]
variable[buffer_size] assign[=] constant[0]
variable[arguments] assign[=] constant[None]
variable[format_bytes] assign[=] call[name[ctypes].windll.kernel32.FormatMessageW, parameter[name[flags], name[source], name[message_id], name[language_id], call[name[ctypes].byref, parameter[name[result_buffer]]], name[buffer_size], name[arguments]]]
call[name[handle_nonzero_success], parameter[name[format_bytes]]]
variable[message] assign[=] name[result_buffer].value
call[name[ctypes].windll.kernel32.LocalFree, parameter[name[result_buffer]]]
return[name[message]] | keyword[def] identifier[format_system_message] ( identifier[errno] ):
literal[string]
identifier[ALLOCATE_BUFFER] = literal[int]
identifier[ARGUMENT_ARRAY] = literal[int]
identifier[FROM_HMODULE] = literal[int]
identifier[FROM_STRING] = literal[int]
identifier[FROM_SYSTEM] = literal[int]
identifier[IGNORE_INSERTS] = literal[int]
identifier[flags] = identifier[ALLOCATE_BUFFER] | identifier[FROM_SYSTEM]
identifier[source] = keyword[None]
identifier[message_id] = identifier[errno]
identifier[language_id] = literal[int]
identifier[result_buffer] = identifier[ctypes] . identifier[wintypes] . identifier[LPWSTR] ()
identifier[buffer_size] = literal[int]
identifier[arguments] = keyword[None]
identifier[format_bytes] = identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[FormatMessageW] (
identifier[flags] ,
identifier[source] ,
identifier[message_id] ,
identifier[language_id] ,
identifier[ctypes] . identifier[byref] ( identifier[result_buffer] ),
identifier[buffer_size] ,
identifier[arguments] ,
)
identifier[handle_nonzero_success] ( identifier[format_bytes] )
identifier[message] = identifier[result_buffer] . identifier[value]
identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[LocalFree] ( identifier[result_buffer] )
keyword[return] identifier[message] | def format_system_message(errno):
"""
Call FormatMessage with a system error number to retrieve
the descriptive error message.
"""
# first some flags used by FormatMessageW
ALLOCATE_BUFFER = 256
ARGUMENT_ARRAY = 8192
FROM_HMODULE = 2048
FROM_STRING = 1024
FROM_SYSTEM = 4096
IGNORE_INSERTS = 512
# Let FormatMessageW allocate the buffer (we'll free it below)
# Also, let it know we want a system error message.
flags = ALLOCATE_BUFFER | FROM_SYSTEM
source = None
message_id = errno
language_id = 0
result_buffer = ctypes.wintypes.LPWSTR()
buffer_size = 0
arguments = None
format_bytes = ctypes.windll.kernel32.FormatMessageW(flags, source, message_id, language_id, ctypes.byref(result_buffer), buffer_size, arguments)
# note the following will cause an infinite loop if GetLastError
# repeatedly returns an error that cannot be formatted, although
# this should not happen.
handle_nonzero_success(format_bytes)
message = result_buffer.value
ctypes.windll.kernel32.LocalFree(result_buffer)
return message |
def _run_listeners(self, line):
"""
Each listener's associated regular expression is matched against raw IRC
input. If there is a match, the listener's associated function is called
with all the regular expression's matched subgroups.
"""
for regex, callbacks in self.listeners.iteritems():
match = regex.match(line)
if not match:
continue
for callback in callbacks:
callback(*match.groups()) | def function[_run_listeners, parameter[self, line]]:
constant[
Each listener's associated regular expression is matched against raw IRC
input. If there is a match, the listener's associated function is called
with all the regular expression's matched subgroups.
]
for taget[tuple[[<ast.Name object at 0x7da1b1397ee0>, <ast.Name object at 0x7da1b1396bc0>]]] in starred[call[name[self].listeners.iteritems, parameter[]]] begin[:]
variable[match] assign[=] call[name[regex].match, parameter[name[line]]]
if <ast.UnaryOp object at 0x7da1b13971f0> begin[:]
continue
for taget[name[callback]] in starred[name[callbacks]] begin[:]
call[name[callback], parameter[<ast.Starred object at 0x7da1b1334130>]] | keyword[def] identifier[_run_listeners] ( identifier[self] , identifier[line] ):
literal[string]
keyword[for] identifier[regex] , identifier[callbacks] keyword[in] identifier[self] . identifier[listeners] . identifier[iteritems] ():
identifier[match] = identifier[regex] . identifier[match] ( identifier[line] )
keyword[if] keyword[not] identifier[match] :
keyword[continue]
keyword[for] identifier[callback] keyword[in] identifier[callbacks] :
identifier[callback] (* identifier[match] . identifier[groups] ()) | def _run_listeners(self, line):
"""
Each listener's associated regular expression is matched against raw IRC
input. If there is a match, the listener's associated function is called
with all the regular expression's matched subgroups.
"""
for (regex, callbacks) in self.listeners.iteritems():
match = regex.match(line)
if not match:
continue # depends on [control=['if'], data=[]]
for callback in callbacks:
callback(*match.groups()) # depends on [control=['for'], data=['callback']] # depends on [control=['for'], data=[]] |
def unweave(
target, advices=None, pointcut=None, ctx=None, depth=1, public=False,
):
"""Unweave advices on target with input pointcut.
:param callable target: target from where checking pointcut and
weaving advices.
:param pointcut: condition for weaving advices on joinpointe.
The condition depends on its type.
:type pointcut:
- NoneType: advices are weaved on target.
- str: target name is compared to pointcut regex.
- function: called with target in parameter, if True, advices will
be weaved on target.
:param ctx: target ctx (class or instance).
:param int depth: class weaving depthing.
:param bool public: (default True) weave only on public members
:return: the intercepted functions created from input target.
"""
# ensure advices is a list if not None
if advices is not None:
if isroutine(advices):
advices = [advices]
# initialize pointcut
# do nothing if pointcut is None or is callable
if pointcut is None or callable(pointcut):
pass
# in case of str, use a name matcher
elif isinstance(pointcut, string_types):
pointcut = _namematcher(pointcut)
else:
error_msg = "Wrong pointcut to check weaving on {0}.".format(target)
advice_msg = "Must be None, or be a str or a function/method."
right_msg = "Not {0}".format(type(pointcut))
raise AdviceError(
"{0} {1} {2}".format(error_msg, advice_msg, right_msg)
)
# get the right ctx
if ctx is None:
ctx = find_ctx(target)
_unweave(
target=target, advices=advices, pointcut=pointcut,
ctx=ctx,
depth=depth, depth_predicate=_publiccallable if public else callable
) | def function[unweave, parameter[target, advices, pointcut, ctx, depth, public]]:
constant[Unweave advices on target with input pointcut.
:param callable target: target from where checking pointcut and
weaving advices.
:param pointcut: condition for weaving advices on joinpointe.
The condition depends on its type.
:type pointcut:
- NoneType: advices are weaved on target.
- str: target name is compared to pointcut regex.
- function: called with target in parameter, if True, advices will
be weaved on target.
:param ctx: target ctx (class or instance).
:param int depth: class weaving depthing.
:param bool public: (default True) weave only on public members
:return: the intercepted functions created from input target.
]
if compare[name[advices] is_not constant[None]] begin[:]
if call[name[isroutine], parameter[name[advices]]] begin[:]
variable[advices] assign[=] list[[<ast.Name object at 0x7da1b0a430a0>]]
if <ast.BoolOp object at 0x7da1b0a42320> begin[:]
pass
if compare[name[ctx] is constant[None]] begin[:]
variable[ctx] assign[=] call[name[find_ctx], parameter[name[target]]]
call[name[_unweave], parameter[]] | keyword[def] identifier[unweave] (
identifier[target] , identifier[advices] = keyword[None] , identifier[pointcut] = keyword[None] , identifier[ctx] = keyword[None] , identifier[depth] = literal[int] , identifier[public] = keyword[False] ,
):
literal[string]
keyword[if] identifier[advices] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isroutine] ( identifier[advices] ):
identifier[advices] =[ identifier[advices] ]
keyword[if] identifier[pointcut] keyword[is] keyword[None] keyword[or] identifier[callable] ( identifier[pointcut] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[pointcut] , identifier[string_types] ):
identifier[pointcut] = identifier[_namematcher] ( identifier[pointcut] )
keyword[else] :
identifier[error_msg] = literal[string] . identifier[format] ( identifier[target] )
identifier[advice_msg] = literal[string]
identifier[right_msg] = literal[string] . identifier[format] ( identifier[type] ( identifier[pointcut] ))
keyword[raise] identifier[AdviceError] (
literal[string] . identifier[format] ( identifier[error_msg] , identifier[advice_msg] , identifier[right_msg] )
)
keyword[if] identifier[ctx] keyword[is] keyword[None] :
identifier[ctx] = identifier[find_ctx] ( identifier[target] )
identifier[_unweave] (
identifier[target] = identifier[target] , identifier[advices] = identifier[advices] , identifier[pointcut] = identifier[pointcut] ,
identifier[ctx] = identifier[ctx] ,
identifier[depth] = identifier[depth] , identifier[depth_predicate] = identifier[_publiccallable] keyword[if] identifier[public] keyword[else] identifier[callable]
) | def unweave(target, advices=None, pointcut=None, ctx=None, depth=1, public=False):
"""Unweave advices on target with input pointcut.
:param callable target: target from where checking pointcut and
weaving advices.
:param pointcut: condition for weaving advices on joinpointe.
The condition depends on its type.
:type pointcut:
- NoneType: advices are weaved on target.
- str: target name is compared to pointcut regex.
- function: called with target in parameter, if True, advices will
be weaved on target.
:param ctx: target ctx (class or instance).
:param int depth: class weaving depthing.
:param bool public: (default True) weave only on public members
:return: the intercepted functions created from input target.
"""
# ensure advices is a list if not None
if advices is not None:
if isroutine(advices):
advices = [advices] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['advices']]
# initialize pointcut
# do nothing if pointcut is None or is callable
if pointcut is None or callable(pointcut):
pass # depends on [control=['if'], data=[]]
# in case of str, use a name matcher
elif isinstance(pointcut, string_types):
pointcut = _namematcher(pointcut) # depends on [control=['if'], data=[]]
else:
error_msg = 'Wrong pointcut to check weaving on {0}.'.format(target)
advice_msg = 'Must be None, or be a str or a function/method.'
right_msg = 'Not {0}'.format(type(pointcut))
raise AdviceError('{0} {1} {2}'.format(error_msg, advice_msg, right_msg))
# get the right ctx
if ctx is None:
ctx = find_ctx(target) # depends on [control=['if'], data=['ctx']]
_unweave(target=target, advices=advices, pointcut=pointcut, ctx=ctx, depth=depth, depth_predicate=_publiccallable if public else callable) |
def streams(self):
"""Returns the list of streams that belong to the device"""
result = self.db.read(self.path, {"q": "ls"})
if result is None or result.json() is None:
return []
streams = []
for s in result.json():
strm = self[s["name"]]
strm.metadata = s
streams.append(strm)
return streams | def function[streams, parameter[self]]:
constant[Returns the list of streams that belong to the device]
variable[result] assign[=] call[name[self].db.read, parameter[name[self].path, dictionary[[<ast.Constant object at 0x7da1b03b9d20>], [<ast.Constant object at 0x7da1b03bae60>]]]]
if <ast.BoolOp object at 0x7da1b03b9060> begin[:]
return[list[[]]]
variable[streams] assign[=] list[[]]
for taget[name[s]] in starred[call[name[result].json, parameter[]]] begin[:]
variable[strm] assign[=] call[name[self]][call[name[s]][constant[name]]]
name[strm].metadata assign[=] name[s]
call[name[streams].append, parameter[name[strm]]]
return[name[streams]] | keyword[def] identifier[streams] ( identifier[self] ):
literal[string]
identifier[result] = identifier[self] . identifier[db] . identifier[read] ( identifier[self] . identifier[path] ,{ literal[string] : literal[string] })
keyword[if] identifier[result] keyword[is] keyword[None] keyword[or] identifier[result] . identifier[json] () keyword[is] keyword[None] :
keyword[return] []
identifier[streams] =[]
keyword[for] identifier[s] keyword[in] identifier[result] . identifier[json] ():
identifier[strm] = identifier[self] [ identifier[s] [ literal[string] ]]
identifier[strm] . identifier[metadata] = identifier[s]
identifier[streams] . identifier[append] ( identifier[strm] )
keyword[return] identifier[streams] | def streams(self):
"""Returns the list of streams that belong to the device"""
result = self.db.read(self.path, {'q': 'ls'})
if result is None or result.json() is None:
return [] # depends on [control=['if'], data=[]]
streams = []
for s in result.json():
strm = self[s['name']]
strm.metadata = s
streams.append(strm) # depends on [control=['for'], data=['s']]
return streams |
def directory_remove(self, path):
"""Removes a guest directory if empty.
Symbolic links in the final component will not be followed,
instead an not-a-directory error is reported.
in path of type str
Path to the directory that should be removed. Guest path style.
"""
if not isinstance(path, basestring):
raise TypeError("path can only be an instance of type basestring")
self._call("directoryRemove",
in_p=[path]) | def function[directory_remove, parameter[self, path]]:
constant[Removes a guest directory if empty.
Symbolic links in the final component will not be followed,
instead an not-a-directory error is reported.
in path of type str
Path to the directory that should be removed. Guest path style.
]
if <ast.UnaryOp object at 0x7da1b26ac3d0> begin[:]
<ast.Raise object at 0x7da1b26afb20>
call[name[self]._call, parameter[constant[directoryRemove]]] | keyword[def] identifier[directory_remove] ( identifier[self] , identifier[path] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[path] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[path] ]) | def directory_remove(self, path):
"""Removes a guest directory if empty.
Symbolic links in the final component will not be followed,
instead an not-a-directory error is reported.
in path of type str
Path to the directory that should be removed. Guest path style.
"""
if not isinstance(path, basestring):
raise TypeError('path can only be an instance of type basestring') # depends on [control=['if'], data=[]]
self._call('directoryRemove', in_p=[path]) |
def python_cardinality(self, subject: str, all_are_optional: bool = False) -> str:
"""Add the appropriate python typing to subject (e.g. Optional, List, ...)
:param subject: Subject to be decorated
:param all_are_optional: Force everything to be optional
:return: Typed subject
"""
if self.multiple_elements:
rval = f"typing.List[{subject}]"
elif self.one_optional_element:
rval = subject if subject.startswith("typing.Optional[") else f"typing.Optional[{subject}]"
elif self.max == 0:
rval = "type(None)"
else:
rval = subject
if all_are_optional and not self.one_optional_element:
rval = f"typing.Optional[{rval}]"
return rval | def function[python_cardinality, parameter[self, subject, all_are_optional]]:
constant[Add the appropriate python typing to subject (e.g. Optional, List, ...)
:param subject: Subject to be decorated
:param all_are_optional: Force everything to be optional
:return: Typed subject
]
if name[self].multiple_elements begin[:]
variable[rval] assign[=] <ast.JoinedStr object at 0x7da18f00feb0>
if <ast.BoolOp object at 0x7da18f00d780> begin[:]
variable[rval] assign[=] <ast.JoinedStr object at 0x7da18f00dd80>
return[name[rval]] | keyword[def] identifier[python_cardinality] ( identifier[self] , identifier[subject] : identifier[str] , identifier[all_are_optional] : identifier[bool] = keyword[False] )-> identifier[str] :
literal[string]
keyword[if] identifier[self] . identifier[multiple_elements] :
identifier[rval] = literal[string]
keyword[elif] identifier[self] . identifier[one_optional_element] :
identifier[rval] = identifier[subject] keyword[if] identifier[subject] . identifier[startswith] ( literal[string] ) keyword[else] literal[string]
keyword[elif] identifier[self] . identifier[max] == literal[int] :
identifier[rval] = literal[string]
keyword[else] :
identifier[rval] = identifier[subject]
keyword[if] identifier[all_are_optional] keyword[and] keyword[not] identifier[self] . identifier[one_optional_element] :
identifier[rval] = literal[string]
keyword[return] identifier[rval] | def python_cardinality(self, subject: str, all_are_optional: bool=False) -> str:
"""Add the appropriate python typing to subject (e.g. Optional, List, ...)
:param subject: Subject to be decorated
:param all_are_optional: Force everything to be optional
:return: Typed subject
"""
if self.multiple_elements:
rval = f'typing.List[{subject}]' # depends on [control=['if'], data=[]]
elif self.one_optional_element:
rval = subject if subject.startswith('typing.Optional[') else f'typing.Optional[{subject}]' # depends on [control=['if'], data=[]]
elif self.max == 0:
rval = 'type(None)' # depends on [control=['if'], data=[]]
else:
rval = subject
if all_are_optional and (not self.one_optional_element):
rval = f'typing.Optional[{rval}]' # depends on [control=['if'], data=[]]
return rval |
def _to_key_val_pairs(defs):
""" Helper to split strings, lists and dicts into (current, value) tuples for accumulation """
if isinstance(defs, STRING_TYPES):
# Convert 'a' to [('a', None)], or 'a.b.c' to [('a', 'b.c')]
return [defs.split('.', 1) if '.' in defs else (defs, None)]
else:
pairs = []
# Convert collections of strings or lists as above; break dicts into component items
pairs.extend(p for s in defs if isinstance(s, STRING_TYPES) for p in _to_key_val_pairs(s))
pairs.extend(p for l in defs if isinstance(l, list) for p in _to_key_val_pairs(l))
pairs.extend(p for d in defs if isinstance(d, dict) for p in iteritems(d))
return pairs | def function[_to_key_val_pairs, parameter[defs]]:
constant[ Helper to split strings, lists and dicts into (current, value) tuples for accumulation ]
if call[name[isinstance], parameter[name[defs], name[STRING_TYPES]]] begin[:]
return[list[[<ast.IfExp object at 0x7da18dc9b430>]]] | keyword[def] identifier[_to_key_val_pairs] ( identifier[defs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[defs] , identifier[STRING_TYPES] ):
keyword[return] [ identifier[defs] . identifier[split] ( literal[string] , literal[int] ) keyword[if] literal[string] keyword[in] identifier[defs] keyword[else] ( identifier[defs] , keyword[None] )]
keyword[else] :
identifier[pairs] =[]
identifier[pairs] . identifier[extend] ( identifier[p] keyword[for] identifier[s] keyword[in] identifier[defs] keyword[if] identifier[isinstance] ( identifier[s] , identifier[STRING_TYPES] ) keyword[for] identifier[p] keyword[in] identifier[_to_key_val_pairs] ( identifier[s] ))
identifier[pairs] . identifier[extend] ( identifier[p] keyword[for] identifier[l] keyword[in] identifier[defs] keyword[if] identifier[isinstance] ( identifier[l] , identifier[list] ) keyword[for] identifier[p] keyword[in] identifier[_to_key_val_pairs] ( identifier[l] ))
identifier[pairs] . identifier[extend] ( identifier[p] keyword[for] identifier[d] keyword[in] identifier[defs] keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] ) keyword[for] identifier[p] keyword[in] identifier[iteritems] ( identifier[d] ))
keyword[return] identifier[pairs] | def _to_key_val_pairs(defs):
""" Helper to split strings, lists and dicts into (current, value) tuples for accumulation """
if isinstance(defs, STRING_TYPES):
# Convert 'a' to [('a', None)], or 'a.b.c' to [('a', 'b.c')]
return [defs.split('.', 1) if '.' in defs else (defs, None)] # depends on [control=['if'], data=[]]
else:
pairs = []
# Convert collections of strings or lists as above; break dicts into component items
pairs.extend((p for s in defs if isinstance(s, STRING_TYPES) for p in _to_key_val_pairs(s)))
pairs.extend((p for l in defs if isinstance(l, list) for p in _to_key_val_pairs(l)))
pairs.extend((p for d in defs if isinstance(d, dict) for p in iteritems(d)))
return pairs |
def closed(self, reason):
"""Callback performed when the transport is closed."""
self.server.remove_connection(self)
self.protocol.connection_lost(reason)
if not isinstance(reason, ConnectionClosed):
logger.warn("connection closed, reason: %s" % str(reason))
else:
logger.info("connection closed") | def function[closed, parameter[self, reason]]:
constant[Callback performed when the transport is closed.]
call[name[self].server.remove_connection, parameter[name[self]]]
call[name[self].protocol.connection_lost, parameter[name[reason]]]
if <ast.UnaryOp object at 0x7da1b09134f0> begin[:]
call[name[logger].warn, parameter[binary_operation[constant[connection closed, reason: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[reason]]]]]] | keyword[def] identifier[closed] ( identifier[self] , identifier[reason] ):
literal[string]
identifier[self] . identifier[server] . identifier[remove_connection] ( identifier[self] )
identifier[self] . identifier[protocol] . identifier[connection_lost] ( identifier[reason] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[reason] , identifier[ConnectionClosed] ):
identifier[logger] . identifier[warn] ( literal[string] % identifier[str] ( identifier[reason] ))
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] ) | def closed(self, reason):
"""Callback performed when the transport is closed."""
self.server.remove_connection(self)
self.protocol.connection_lost(reason)
if not isinstance(reason, ConnectionClosed):
logger.warn('connection closed, reason: %s' % str(reason)) # depends on [control=['if'], data=[]]
else:
logger.info('connection closed') |
def set_geometry(self, im, geometry, options=None):
"""Rescale the image to the new geometry.
"""
if not geometry:
return im
options = options or {}
width, height = geometry
if not width and not height:
return im
imw, imh = self.get_size(im)
# Geometry match the current size?
if (width is None) or (imw == width):
if (height is None) or (imh == height):
return im
ratio = float(imw) / imh
if width and height:
# Smaller than the target?
smaller = (imw <= width) and (imh <= height)
if smaller and not options['upscale']:
return im
resize = options.get('resize', 'fill')
if resize == 'fill':
new_width = width
new_height = int(ceil(width / ratio))
if new_height < height:
new_height = height
new_width = int(ceil(height * ratio))
elif resize == 'fit':
new_width = int(ceil(height * ratio))
new_height = height
if new_width > width:
new_width = width
new_height = int(ceil(width / ratio))
elif resize == 'stretch':
new_width = width
new_height = height
elif height:
# Smaller than the target?
smaller = imh <= height
if smaller and not options['upscale']:
return im
new_width = int(ceil(height * ratio))
new_height = height
else:
# Smaller than the target?
smaller = imw <= width
if smaller and not options['upscale']:
return im
new_width = width
new_height = int(ceil(width / ratio))
im = self.scale(im, new_width, new_height)
return im | def function[set_geometry, parameter[self, im, geometry, options]]:
constant[Rescale the image to the new geometry.
]
if <ast.UnaryOp object at 0x7da2054a7760> begin[:]
return[name[im]]
variable[options] assign[=] <ast.BoolOp object at 0x7da2054a78b0>
<ast.Tuple object at 0x7da2054a5e10> assign[=] name[geometry]
if <ast.BoolOp object at 0x7da2054a68c0> begin[:]
return[name[im]]
<ast.Tuple object at 0x7da2054a4400> assign[=] call[name[self].get_size, parameter[name[im]]]
if <ast.BoolOp object at 0x7da2054a60b0> begin[:]
if <ast.BoolOp object at 0x7da2054a5e40> begin[:]
return[name[im]]
variable[ratio] assign[=] binary_operation[call[name[float], parameter[name[imw]]] / name[imh]]
if <ast.BoolOp object at 0x7da2054a61a0> begin[:]
variable[smaller] assign[=] <ast.BoolOp object at 0x7da2054a7160>
if <ast.BoolOp object at 0x7da2054a7460> begin[:]
return[name[im]]
variable[resize] assign[=] call[name[options].get, parameter[constant[resize], constant[fill]]]
if compare[name[resize] equal[==] constant[fill]] begin[:]
variable[new_width] assign[=] name[width]
variable[new_height] assign[=] call[name[int], parameter[call[name[ceil], parameter[binary_operation[name[width] / name[ratio]]]]]]
if compare[name[new_height] less[<] name[height]] begin[:]
variable[new_height] assign[=] name[height]
variable[new_width] assign[=] call[name[int], parameter[call[name[ceil], parameter[binary_operation[name[height] * name[ratio]]]]]]
variable[im] assign[=] call[name[self].scale, parameter[name[im], name[new_width], name[new_height]]]
return[name[im]] | keyword[def] identifier[set_geometry] ( identifier[self] , identifier[im] , identifier[geometry] , identifier[options] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[geometry] :
keyword[return] identifier[im]
identifier[options] = identifier[options] keyword[or] {}
identifier[width] , identifier[height] = identifier[geometry]
keyword[if] keyword[not] identifier[width] keyword[and] keyword[not] identifier[height] :
keyword[return] identifier[im]
identifier[imw] , identifier[imh] = identifier[self] . identifier[get_size] ( identifier[im] )
keyword[if] ( identifier[width] keyword[is] keyword[None] ) keyword[or] ( identifier[imw] == identifier[width] ):
keyword[if] ( identifier[height] keyword[is] keyword[None] ) keyword[or] ( identifier[imh] == identifier[height] ):
keyword[return] identifier[im]
identifier[ratio] = identifier[float] ( identifier[imw] )/ identifier[imh]
keyword[if] identifier[width] keyword[and] identifier[height] :
identifier[smaller] =( identifier[imw] <= identifier[width] ) keyword[and] ( identifier[imh] <= identifier[height] )
keyword[if] identifier[smaller] keyword[and] keyword[not] identifier[options] [ literal[string] ]:
keyword[return] identifier[im]
identifier[resize] = identifier[options] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[resize] == literal[string] :
identifier[new_width] = identifier[width]
identifier[new_height] = identifier[int] ( identifier[ceil] ( identifier[width] / identifier[ratio] ))
keyword[if] identifier[new_height] < identifier[height] :
identifier[new_height] = identifier[height]
identifier[new_width] = identifier[int] ( identifier[ceil] ( identifier[height] * identifier[ratio] ))
keyword[elif] identifier[resize] == literal[string] :
identifier[new_width] = identifier[int] ( identifier[ceil] ( identifier[height] * identifier[ratio] ))
identifier[new_height] = identifier[height]
keyword[if] identifier[new_width] > identifier[width] :
identifier[new_width] = identifier[width]
identifier[new_height] = identifier[int] ( identifier[ceil] ( identifier[width] / identifier[ratio] ))
keyword[elif] identifier[resize] == literal[string] :
identifier[new_width] = identifier[width]
identifier[new_height] = identifier[height]
keyword[elif] identifier[height] :
identifier[smaller] = identifier[imh] <= identifier[height]
keyword[if] identifier[smaller] keyword[and] keyword[not] identifier[options] [ literal[string] ]:
keyword[return] identifier[im]
identifier[new_width] = identifier[int] ( identifier[ceil] ( identifier[height] * identifier[ratio] ))
identifier[new_height] = identifier[height]
keyword[else] :
identifier[smaller] = identifier[imw] <= identifier[width]
keyword[if] identifier[smaller] keyword[and] keyword[not] identifier[options] [ literal[string] ]:
keyword[return] identifier[im]
identifier[new_width] = identifier[width]
identifier[new_height] = identifier[int] ( identifier[ceil] ( identifier[width] / identifier[ratio] ))
identifier[im] = identifier[self] . identifier[scale] ( identifier[im] , identifier[new_width] , identifier[new_height] )
keyword[return] identifier[im] | def set_geometry(self, im, geometry, options=None):
"""Rescale the image to the new geometry.
"""
if not geometry:
return im # depends on [control=['if'], data=[]]
options = options or {}
(width, height) = geometry
if not width and (not height):
return im # depends on [control=['if'], data=[]]
(imw, imh) = self.get_size(im)
# Geometry match the current size?
if width is None or imw == width:
if height is None or imh == height:
return im # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
ratio = float(imw) / imh
if width and height:
# Smaller than the target?
smaller = imw <= width and imh <= height
if smaller and (not options['upscale']):
return im # depends on [control=['if'], data=[]]
resize = options.get('resize', 'fill')
if resize == 'fill':
new_width = width
new_height = int(ceil(width / ratio))
if new_height < height:
new_height = height
new_width = int(ceil(height * ratio)) # depends on [control=['if'], data=['new_height', 'height']] # depends on [control=['if'], data=[]]
elif resize == 'fit':
new_width = int(ceil(height * ratio))
new_height = height
if new_width > width:
new_width = width
new_height = int(ceil(width / ratio)) # depends on [control=['if'], data=['new_width', 'width']] # depends on [control=['if'], data=[]]
elif resize == 'stretch':
new_width = width
new_height = height # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif height:
# Smaller than the target?
smaller = imh <= height
if smaller and (not options['upscale']):
return im # depends on [control=['if'], data=[]]
new_width = int(ceil(height * ratio))
new_height = height # depends on [control=['if'], data=[]]
else:
# Smaller than the target?
smaller = imw <= width
if smaller and (not options['upscale']):
return im # depends on [control=['if'], data=[]]
new_width = width
new_height = int(ceil(width / ratio))
im = self.scale(im, new_width, new_height)
return im |
def iter_errors(self):
""""Lazily yields each ValidationError for the received data dict.
"""
# Deprecate
warnings.warn(
'Property "package.iter_errors" is deprecated.',
UserWarning)
return self.profile.iter_errors(self.to_dict()) | def function[iter_errors, parameter[self]]:
constant["Lazily yields each ValidationError for the received data dict.
]
call[name[warnings].warn, parameter[constant[Property "package.iter_errors" is deprecated.], name[UserWarning]]]
return[call[name[self].profile.iter_errors, parameter[call[name[self].to_dict, parameter[]]]]] | keyword[def] identifier[iter_errors] ( identifier[self] ):
literal[string]
identifier[warnings] . identifier[warn] (
literal[string] ,
identifier[UserWarning] )
keyword[return] identifier[self] . identifier[profile] . identifier[iter_errors] ( identifier[self] . identifier[to_dict] ()) | def iter_errors(self):
""""Lazily yields each ValidationError for the received data dict.
"""
# Deprecate
warnings.warn('Property "package.iter_errors" is deprecated.', UserWarning)
return self.profile.iter_errors(self.to_dict()) |
def append(self, cpe):
"""
Adds a CPE Name to the set if not already.
:param CPE cpe: CPE Name to store in set
:returns: None
:exception: ValueError - invalid version of CPE Name
TEST:
>>> from .cpeset2_2 import CPESet2_2
>>> from .cpe2_2 import CPE2_2
>>> uri1 = 'cpe:/h:hp'
>>> c1 = CPE2_2(uri1)
>>> s = CPESet2_2()
>>> s.append(c1)
"""
if cpe.VERSION != CPE.VERSION_2_2:
errmsg = "CPE Name version {0} not valid, version 2.2 expected".format(
cpe.VERSION)
raise ValueError(errmsg)
for k in self.K:
if cpe.cpe_str == k.cpe_str:
return None
self.K.append(cpe) | def function[append, parameter[self, cpe]]:
constant[
Adds a CPE Name to the set if not already.
:param CPE cpe: CPE Name to store in set
:returns: None
:exception: ValueError - invalid version of CPE Name
TEST:
>>> from .cpeset2_2 import CPESet2_2
>>> from .cpe2_2 import CPE2_2
>>> uri1 = 'cpe:/h:hp'
>>> c1 = CPE2_2(uri1)
>>> s = CPESet2_2()
>>> s.append(c1)
]
if compare[name[cpe].VERSION not_equal[!=] name[CPE].VERSION_2_2] begin[:]
variable[errmsg] assign[=] call[constant[CPE Name version {0} not valid, version 2.2 expected].format, parameter[name[cpe].VERSION]]
<ast.Raise object at 0x7da1b11e4100>
for taget[name[k]] in starred[name[self].K] begin[:]
if compare[name[cpe].cpe_str equal[==] name[k].cpe_str] begin[:]
return[constant[None]]
call[name[self].K.append, parameter[name[cpe]]] | keyword[def] identifier[append] ( identifier[self] , identifier[cpe] ):
literal[string]
keyword[if] identifier[cpe] . identifier[VERSION] != identifier[CPE] . identifier[VERSION_2_2] :
identifier[errmsg] = literal[string] . identifier[format] (
identifier[cpe] . identifier[VERSION] )
keyword[raise] identifier[ValueError] ( identifier[errmsg] )
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[K] :
keyword[if] identifier[cpe] . identifier[cpe_str] == identifier[k] . identifier[cpe_str] :
keyword[return] keyword[None]
identifier[self] . identifier[K] . identifier[append] ( identifier[cpe] ) | def append(self, cpe):
"""
Adds a CPE Name to the set if not already.
:param CPE cpe: CPE Name to store in set
:returns: None
:exception: ValueError - invalid version of CPE Name
TEST:
>>> from .cpeset2_2 import CPESet2_2
>>> from .cpe2_2 import CPE2_2
>>> uri1 = 'cpe:/h:hp'
>>> c1 = CPE2_2(uri1)
>>> s = CPESet2_2()
>>> s.append(c1)
"""
if cpe.VERSION != CPE.VERSION_2_2:
errmsg = 'CPE Name version {0} not valid, version 2.2 expected'.format(cpe.VERSION)
raise ValueError(errmsg) # depends on [control=['if'], data=[]]
for k in self.K:
if cpe.cpe_str == k.cpe_str:
return None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
self.K.append(cpe) |
def enumerateURL(urlDict, outputFolder, startIndex= 0, maxErrors = 100):
"""
Function that performs the enumeration itself.
"""
for i, url in enumerate(urlDict.keys()):
# Grabbing domain name:
domain = re.findall("://(.*)/", url)[0]
# Defining the starting index
index = startIndex
# The app will stop when this value reaches maxErrors
consecutiveErrors = 0
i3Browser = browser.Browser()
# Main loop that checks if the maximum number of errors has been reached
while consecutiveErrors <= maxErrors:
# creating the new URL to download
newQuery = url.replace("<INDEX>", str(index))
print(newQuery)
# Downloading the file
try:
data = i3Browser.recoverURL(newQuery)
filename = domain.replace("/", "|") + "_" + "-profile_" + str(index).rjust(10, "0") +".html"
if urlDict[url] != None:
if urlDict[url] in data:
print(general.info("Storing resource as:\t" + filename + "..."))
# The profile was found so we will store it:
with open( outputFolder + "/" + filename, "w") as oF:
oF.write(data)
else:
# The profile was found so we will store it:
print(general.info("Storing resource as:\t" + filename + "..."))
with open( outputFolder + "/" + filename, "w") as oF:
oF.write(data)
except:
pass
#logger.error("The resource could not be downloaded.")
index+=1 | def function[enumerateURL, parameter[urlDict, outputFolder, startIndex, maxErrors]]:
constant[
Function that performs the enumeration itself.
]
for taget[tuple[[<ast.Name object at 0x7da204961de0>, <ast.Name object at 0x7da204960bb0>]]] in starred[call[name[enumerate], parameter[call[name[urlDict].keys, parameter[]]]]] begin[:]
variable[domain] assign[=] call[call[name[re].findall, parameter[constant[://(.*)/], name[url]]]][constant[0]]
variable[index] assign[=] name[startIndex]
variable[consecutiveErrors] assign[=] constant[0]
variable[i3Browser] assign[=] call[name[browser].Browser, parameter[]]
while compare[name[consecutiveErrors] less_or_equal[<=] name[maxErrors]] begin[:]
variable[newQuery] assign[=] call[name[url].replace, parameter[constant[<INDEX>], call[name[str], parameter[name[index]]]]]
call[name[print], parameter[name[newQuery]]]
<ast.Try object at 0x7da204961d80>
<ast.AugAssign object at 0x7da204962a10> | keyword[def] identifier[enumerateURL] ( identifier[urlDict] , identifier[outputFolder] , identifier[startIndex] = literal[int] , identifier[maxErrors] = literal[int] ):
literal[string]
keyword[for] identifier[i] , identifier[url] keyword[in] identifier[enumerate] ( identifier[urlDict] . identifier[keys] ()):
identifier[domain] = identifier[re] . identifier[findall] ( literal[string] , identifier[url] )[ literal[int] ]
identifier[index] = identifier[startIndex]
identifier[consecutiveErrors] = literal[int]
identifier[i3Browser] = identifier[browser] . identifier[Browser] ()
keyword[while] identifier[consecutiveErrors] <= identifier[maxErrors] :
identifier[newQuery] = identifier[url] . identifier[replace] ( literal[string] , identifier[str] ( identifier[index] ))
identifier[print] ( identifier[newQuery] )
keyword[try] :
identifier[data] = identifier[i3Browser] . identifier[recoverURL] ( identifier[newQuery] )
identifier[filename] = identifier[domain] . identifier[replace] ( literal[string] , literal[string] )+ literal[string] + literal[string] + identifier[str] ( identifier[index] ). identifier[rjust] ( literal[int] , literal[string] )+ literal[string]
keyword[if] identifier[urlDict] [ identifier[url] ]!= keyword[None] :
keyword[if] identifier[urlDict] [ identifier[url] ] keyword[in] identifier[data] :
identifier[print] ( identifier[general] . identifier[info] ( literal[string] + identifier[filename] + literal[string] ))
keyword[with] identifier[open] ( identifier[outputFolder] + literal[string] + identifier[filename] , literal[string] ) keyword[as] identifier[oF] :
identifier[oF] . identifier[write] ( identifier[data] )
keyword[else] :
identifier[print] ( identifier[general] . identifier[info] ( literal[string] + identifier[filename] + literal[string] ))
keyword[with] identifier[open] ( identifier[outputFolder] + literal[string] + identifier[filename] , literal[string] ) keyword[as] identifier[oF] :
identifier[oF] . identifier[write] ( identifier[data] )
keyword[except] :
keyword[pass]
identifier[index] += literal[int] | def enumerateURL(urlDict, outputFolder, startIndex=0, maxErrors=100):
"""
Function that performs the enumeration itself.
"""
for (i, url) in enumerate(urlDict.keys()): # Grabbing domain name:
domain = re.findall('://(.*)/', url)[0] # Defining the starting index
index = startIndex # The app will stop when this value reaches maxErrors
consecutiveErrors = 0
i3Browser = browser.Browser() # Main loop that checks if the maximum number of errors has been reached
while consecutiveErrors <= maxErrors: # creating the new URL to download
newQuery = url.replace('<INDEX>', str(index))
print(newQuery) # Downloading the file
try:
data = i3Browser.recoverURL(newQuery)
filename = domain.replace('/', '|') + '_' + '-profile_' + str(index).rjust(10, '0') + '.html'
if urlDict[url] != None:
if urlDict[url] in data:
print(general.info('Storing resource as:\t' + filename + '...')) # The profile was found so we will store it:
with open(outputFolder + '/' + filename, 'w') as oF:
oF.write(data) # depends on [control=['with'], data=['oF']] # depends on [control=['if'], data=['data']] # depends on [control=['if'], data=[]]
else: # The profile was found so we will store it:
print(general.info('Storing resource as:\t' + filename + '...'))
with open(outputFolder + '/' + filename, 'w') as oF:
oF.write(data) # depends on [control=['with'], data=['oF']] # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] #logger.error("The resource could not be downloaded.")
index += 1 # depends on [control=['while'], data=[]] # depends on [control=['for'], data=[]] |
def _copy_sources_to_generated_destination(self):
""" Copies all project files to specified directory - generated dir """
files = []
for key in FILES_EXTENSIONS.keys():
if type(self.project['export'][key]) is dict:
for k,v in self.project['export'][key].items():
files.extend(v)
elif type(self.project['export'][key]) is list:
files.extend(self.project['export'][key])
else:
files.append(self.project['export'][key])
destination = os.path.join(self.settings.root, self.project['export']['output_dir']['path'])
if os.path.exists(destination):
shutil.rmtree(destination)
for item in files:
s = os.path.join(self.settings.root, item)
d = os.path.join(destination, item)
if os.path.isdir(s):
shutil.copytree(s,d)
else:
if not os.path.exists(os.path.dirname(d)):
os.makedirs(os.path.join(self.settings.root, os.path.dirname(d)))
shutil.copy2(s,d) | def function[_copy_sources_to_generated_destination, parameter[self]]:
constant[ Copies all project files to specified directory - generated dir ]
variable[files] assign[=] list[[]]
for taget[name[key]] in starred[call[name[FILES_EXTENSIONS].keys, parameter[]]] begin[:]
if compare[call[name[type], parameter[call[call[name[self].project][constant[export]]][name[key]]]] is name[dict]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0c36290>, <ast.Name object at 0x7da1b0c36920>]]] in starred[call[call[call[name[self].project][constant[export]]][name[key]].items, parameter[]]] begin[:]
call[name[files].extend, parameter[name[v]]]
variable[destination] assign[=] call[name[os].path.join, parameter[name[self].settings.root, call[call[call[name[self].project][constant[export]]][constant[output_dir]]][constant[path]]]]
if call[name[os].path.exists, parameter[name[destination]]] begin[:]
call[name[shutil].rmtree, parameter[name[destination]]]
for taget[name[item]] in starred[name[files]] begin[:]
variable[s] assign[=] call[name[os].path.join, parameter[name[self].settings.root, name[item]]]
variable[d] assign[=] call[name[os].path.join, parameter[name[destination], name[item]]]
if call[name[os].path.isdir, parameter[name[s]]] begin[:]
call[name[shutil].copytree, parameter[name[s], name[d]]] | keyword[def] identifier[_copy_sources_to_generated_destination] ( identifier[self] ):
literal[string]
identifier[files] =[]
keyword[for] identifier[key] keyword[in] identifier[FILES_EXTENSIONS] . identifier[keys] ():
keyword[if] identifier[type] ( identifier[self] . identifier[project] [ literal[string] ][ identifier[key] ]) keyword[is] identifier[dict] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[project] [ literal[string] ][ identifier[key] ]. identifier[items] ():
identifier[files] . identifier[extend] ( identifier[v] )
keyword[elif] identifier[type] ( identifier[self] . identifier[project] [ literal[string] ][ identifier[key] ]) keyword[is] identifier[list] :
identifier[files] . identifier[extend] ( identifier[self] . identifier[project] [ literal[string] ][ identifier[key] ])
keyword[else] :
identifier[files] . identifier[append] ( identifier[self] . identifier[project] [ literal[string] ][ identifier[key] ])
identifier[destination] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[settings] . identifier[root] , identifier[self] . identifier[project] [ literal[string] ][ literal[string] ][ literal[string] ])
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[destination] ):
identifier[shutil] . identifier[rmtree] ( identifier[destination] )
keyword[for] identifier[item] keyword[in] identifier[files] :
identifier[s] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[settings] . identifier[root] , identifier[item] )
identifier[d] = identifier[os] . identifier[path] . identifier[join] ( identifier[destination] , identifier[item] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[s] ):
identifier[shutil] . identifier[copytree] ( identifier[s] , identifier[d] )
keyword[else] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[d] )):
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[settings] . identifier[root] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[d] )))
identifier[shutil] . identifier[copy2] ( identifier[s] , identifier[d] ) | def _copy_sources_to_generated_destination(self):
""" Copies all project files to specified directory - generated dir """
files = []
for key in FILES_EXTENSIONS.keys():
if type(self.project['export'][key]) is dict:
for (k, v) in self.project['export'][key].items():
files.extend(v) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif type(self.project['export'][key]) is list:
files.extend(self.project['export'][key]) # depends on [control=['if'], data=[]]
else:
files.append(self.project['export'][key]) # depends on [control=['for'], data=['key']]
destination = os.path.join(self.settings.root, self.project['export']['output_dir']['path'])
if os.path.exists(destination):
shutil.rmtree(destination) # depends on [control=['if'], data=[]]
for item in files:
s = os.path.join(self.settings.root, item)
d = os.path.join(destination, item)
if os.path.isdir(s):
shutil.copytree(s, d) # depends on [control=['if'], data=[]]
else:
if not os.path.exists(os.path.dirname(d)):
os.makedirs(os.path.join(self.settings.root, os.path.dirname(d))) # depends on [control=['if'], data=[]]
shutil.copy2(s, d) # depends on [control=['for'], data=['item']] |
def validate_wrap(self, value):
''' Checks that the correct number of elements are in ``value`` and that
each element validates agains the associated Field class
'''
if not isinstance(value, list) and not isinstance(value, tuple):
self._fail_validation_type(value, tuple, list)
for field, value in izip(self.types, list(value)):
field.validate_wrap(value) | def function[validate_wrap, parameter[self, value]]:
constant[ Checks that the correct number of elements are in ``value`` and that
each element validates agains the associated Field class
]
if <ast.BoolOp object at 0x7da1b0925d80> begin[:]
call[name[self]._fail_validation_type, parameter[name[value], name[tuple], name[list]]]
for taget[tuple[[<ast.Name object at 0x7da1b09250f0>, <ast.Name object at 0x7da1b09256c0>]]] in starred[call[name[izip], parameter[name[self].types, call[name[list], parameter[name[value]]]]]] begin[:]
call[name[field].validate_wrap, parameter[name[value]]] | keyword[def] identifier[validate_wrap] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[list] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[value] , identifier[tuple] ):
identifier[self] . identifier[_fail_validation_type] ( identifier[value] , identifier[tuple] , identifier[list] )
keyword[for] identifier[field] , identifier[value] keyword[in] identifier[izip] ( identifier[self] . identifier[types] , identifier[list] ( identifier[value] )):
identifier[field] . identifier[validate_wrap] ( identifier[value] ) | def validate_wrap(self, value):
""" Checks that the correct number of elements are in ``value`` and that
each element validates agains the associated Field class
"""
if not isinstance(value, list) and (not isinstance(value, tuple)):
self._fail_validation_type(value, tuple, list) # depends on [control=['if'], data=[]]
for (field, value) in izip(self.types, list(value)):
field.validate_wrap(value) # depends on [control=['for'], data=[]] |
def from_input(cls, input_file=sys.stdin, modify=None, backend=None):
"""
Creates a Task object, directly from the stdin, by reading one line.
If modify=True, two lines are used, first line interpreted as the
original state of the Task object, and second line as its new,
modified value. This is consistent with the TaskWarrior's hook
system.
Object created by this method should not be saved, deleted
or refreshed, as t could create a infinite loop. For this
reason, TaskWarrior instance is set to None.
Input_file argument can be used to specify the input file,
but defaults to sys.stdin.
"""
# Detect the hook type if not given directly
name = os.path.basename(sys.argv[0])
modify = name.startswith('on-modify') if modify is None else modify
# Create the TaskWarrior instance if none passed
if backend is None:
backends = importlib.import_module('tasklib.backends')
hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
backend = backends.TaskWarrior(data_location=hook_parent_dir)
# TaskWarrior instance is set to None
task = cls(backend)
# Load the data from the input
task._load_data(json.loads(input_file.readline().strip()))
# If this is a on-modify event, we are provided with additional
# line of input, which provides updated data
if modify:
task._update_data(json.loads(input_file.readline().strip()),
remove_missing=True)
return task | def function[from_input, parameter[cls, input_file, modify, backend]]:
constant[
Creates a Task object, directly from the stdin, by reading one line.
If modify=True, two lines are used, first line interpreted as the
original state of the Task object, and second line as its new,
modified value. This is consistent with the TaskWarrior's hook
system.
Object created by this method should not be saved, deleted
or refreshed, as t could create a infinite loop. For this
reason, TaskWarrior instance is set to None.
Input_file argument can be used to specify the input file,
but defaults to sys.stdin.
]
variable[name] assign[=] call[name[os].path.basename, parameter[call[name[sys].argv][constant[0]]]]
variable[modify] assign[=] <ast.IfExp object at 0x7da1b05fe9b0>
if compare[name[backend] is constant[None]] begin[:]
variable[backends] assign[=] call[name[importlib].import_module, parameter[constant[tasklib.backends]]]
variable[hook_parent_dir] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.dirname, parameter[call[name[sys].argv][constant[0]]]]]]
variable[backend] assign[=] call[name[backends].TaskWarrior, parameter[]]
variable[task] assign[=] call[name[cls], parameter[name[backend]]]
call[name[task]._load_data, parameter[call[name[json].loads, parameter[call[call[name[input_file].readline, parameter[]].strip, parameter[]]]]]]
if name[modify] begin[:]
call[name[task]._update_data, parameter[call[name[json].loads, parameter[call[call[name[input_file].readline, parameter[]].strip, parameter[]]]]]]
return[name[task]] | keyword[def] identifier[from_input] ( identifier[cls] , identifier[input_file] = identifier[sys] . identifier[stdin] , identifier[modify] = keyword[None] , identifier[backend] = keyword[None] ):
literal[string]
identifier[name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[sys] . identifier[argv] [ literal[int] ])
identifier[modify] = identifier[name] . identifier[startswith] ( literal[string] ) keyword[if] identifier[modify] keyword[is] keyword[None] keyword[else] identifier[modify]
keyword[if] identifier[backend] keyword[is] keyword[None] :
identifier[backends] = identifier[importlib] . identifier[import_module] ( literal[string] )
identifier[hook_parent_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[argv] [ literal[int] ]))
identifier[backend] = identifier[backends] . identifier[TaskWarrior] ( identifier[data_location] = identifier[hook_parent_dir] )
identifier[task] = identifier[cls] ( identifier[backend] )
identifier[task] . identifier[_load_data] ( identifier[json] . identifier[loads] ( identifier[input_file] . identifier[readline] (). identifier[strip] ()))
keyword[if] identifier[modify] :
identifier[task] . identifier[_update_data] ( identifier[json] . identifier[loads] ( identifier[input_file] . identifier[readline] (). identifier[strip] ()),
identifier[remove_missing] = keyword[True] )
keyword[return] identifier[task] | def from_input(cls, input_file=sys.stdin, modify=None, backend=None):
"""
Creates a Task object, directly from the stdin, by reading one line.
If modify=True, two lines are used, first line interpreted as the
original state of the Task object, and second line as its new,
modified value. This is consistent with the TaskWarrior's hook
system.
Object created by this method should not be saved, deleted
or refreshed, as t could create a infinite loop. For this
reason, TaskWarrior instance is set to None.
Input_file argument can be used to specify the input file,
but defaults to sys.stdin.
"""
# Detect the hook type if not given directly
name = os.path.basename(sys.argv[0])
modify = name.startswith('on-modify') if modify is None else modify
# Create the TaskWarrior instance if none passed
if backend is None:
backends = importlib.import_module('tasklib.backends')
hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
backend = backends.TaskWarrior(data_location=hook_parent_dir) # depends on [control=['if'], data=['backend']]
# TaskWarrior instance is set to None
task = cls(backend)
# Load the data from the input
task._load_data(json.loads(input_file.readline().strip()))
# If this is a on-modify event, we are provided with additional
# line of input, which provides updated data
if modify:
task._update_data(json.loads(input_file.readline().strip()), remove_missing=True) # depends on [control=['if'], data=[]]
return task |
def _len_table_cache(self):
"""Returns the length of the table cache"""
length = 0
for table in self._table_cache:
length += len(self._table_cache[table])
return length | def function[_len_table_cache, parameter[self]]:
constant[Returns the length of the table cache]
variable[length] assign[=] constant[0]
for taget[name[table]] in starred[name[self]._table_cache] begin[:]
<ast.AugAssign object at 0x7da1b1501930>
return[name[length]] | keyword[def] identifier[_len_table_cache] ( identifier[self] ):
literal[string]
identifier[length] = literal[int]
keyword[for] identifier[table] keyword[in] identifier[self] . identifier[_table_cache] :
identifier[length] += identifier[len] ( identifier[self] . identifier[_table_cache] [ identifier[table] ])
keyword[return] identifier[length] | def _len_table_cache(self):
"""Returns the length of the table cache"""
length = 0
for table in self._table_cache:
length += len(self._table_cache[table]) # depends on [control=['for'], data=['table']]
return length |
def pop(h):
"""Pop the heap value from the heap."""
n = h.size() - 1
h.swap(0, n)
down(h, 0, n)
return h.pop() | def function[pop, parameter[h]]:
constant[Pop the heap value from the heap.]
variable[n] assign[=] binary_operation[call[name[h].size, parameter[]] - constant[1]]
call[name[h].swap, parameter[constant[0], name[n]]]
call[name[down], parameter[name[h], constant[0], name[n]]]
return[call[name[h].pop, parameter[]]] | keyword[def] identifier[pop] ( identifier[h] ):
literal[string]
identifier[n] = identifier[h] . identifier[size] ()- literal[int]
identifier[h] . identifier[swap] ( literal[int] , identifier[n] )
identifier[down] ( identifier[h] , literal[int] , identifier[n] )
keyword[return] identifier[h] . identifier[pop] () | def pop(h):
"""Pop the heap value from the heap."""
n = h.size() - 1
h.swap(0, n)
down(h, 0, n)
return h.pop() |
def get_controlled_vocabularies(self, vocab_types=default_vocab_types):
"""
Get all non-method controlled vocabularies
"""
if len(VOCAB):
self.set_vocabularies()
return
data = []
controlled_vocabularies = []
# try to get online
if not set_env.OFFLINE:
url = 'https://www2.earthref.org/vocabularies/controlled.json'
try:
raw = self.get_json_online(url)
data = pd.DataFrame(raw.json())
print('-I- Importing controlled vocabularies from https://earthref.org')
except Exception as ex:
pass
#print(ex, type(ex))
# used cached
if not len(data):
print('-I- Using cached vocabularies')
fname = os.path.join(data_model_dir, "controlled_vocabularies_December_10_2018.json")
data = pd.io.json.read_json(fname, encoding='utf-8-sig')
# parse data
possible_vocabularies = data.columns
## this line means, grab every single controlled vocabulary
vocab_types = list(possible_vocabularies)
def get_cv_from_list(lst):
"""
Check a validations list from the data model.
If there is a controlled vocabulary validation,
return which category of controlled vocabulary it is.
This will generally be applied to the validations col
of the data model
"""
try:
for i in lst:
if "cv(" in i:
return i[4:-2]
except TypeError:
return None
else:
return None
vocab_col_names = []
data_model = self.data_model
for dm_key in data_model.dm:
df = data_model.dm[dm_key]
df['vocab_name'] = df['validations'].apply(get_cv_from_list)
lst = list(zip(df[df['vocab_name'].notnull()]['vocab_name'], df[df['vocab_name'].notnull()].index))
# in lst, first value is the name of the controlled vocabulary
# second value is the name of the dataframe column
vocab_col_names.extend(lst)
# vocab_col_names is now a list of tuples
# consisting of the vocabulary name and the column name
# i.e., (u'type', u'geologic_types')
# remove duplicate col_names:
vocab_col_names = sorted(set(vocab_col_names))
# add in boolean category to controlled vocabularies
bool_items = [{'item': True}, {'item': False}, {'item': 'true'},
{'item': 'false'}, {'item': 0}, {'item': 1},
{'item': 0.0}, {'item': 1.0},
{'item': 't'}, {'item': 'f'},
{'item': 'T'}, {'item': 'F'}]
series = Series({'label': 'Boolean', 'items': bool_items})
data['boolean'] = series
# use vocabulary name to get possible values for the column name
for vocab in vocab_col_names[:]:
if vocab[0] == "magic_table_column":
vocab_col_names.remove(("magic_table_column", "table_column"))
continue
items = data[vocab[0]]['items']
stripped_list = [item['item'] for item in items]
controlled_vocabularies.append(stripped_list)
# create series with the column name as the index,
# and the possible values as the values
ind_values = [i[1] for i in vocab_col_names]
vocabularies = pd.Series(controlled_vocabularies, index=ind_values)
return vocabularies | def function[get_controlled_vocabularies, parameter[self, vocab_types]]:
constant[
Get all non-method controlled vocabularies
]
if call[name[len], parameter[name[VOCAB]]] begin[:]
call[name[self].set_vocabularies, parameter[]]
return[None]
variable[data] assign[=] list[[]]
variable[controlled_vocabularies] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b05760e0> begin[:]
variable[url] assign[=] constant[https://www2.earthref.org/vocabularies/controlled.json]
<ast.Try object at 0x7da1b0577340>
if <ast.UnaryOp object at 0x7da1b0575780> begin[:]
call[name[print], parameter[constant[-I- Using cached vocabularies]]]
variable[fname] assign[=] call[name[os].path.join, parameter[name[data_model_dir], constant[controlled_vocabularies_December_10_2018.json]]]
variable[data] assign[=] call[name[pd].io.json.read_json, parameter[name[fname]]]
variable[possible_vocabularies] assign[=] name[data].columns
variable[vocab_types] assign[=] call[name[list], parameter[name[possible_vocabularies]]]
def function[get_cv_from_list, parameter[lst]]:
constant[
Check a validations list from the data model.
If there is a controlled vocabulary validation,
return which category of controlled vocabulary it is.
This will generally be applied to the validations col
of the data model
]
<ast.Try object at 0x7da1b0576f80>
variable[vocab_col_names] assign[=] list[[]]
variable[data_model] assign[=] name[self].data_model
for taget[name[dm_key]] in starred[name[data_model].dm] begin[:]
variable[df] assign[=] call[name[data_model].dm][name[dm_key]]
call[name[df]][constant[vocab_name]] assign[=] call[call[name[df]][constant[validations]].apply, parameter[name[get_cv_from_list]]]
variable[lst] assign[=] call[name[list], parameter[call[name[zip], parameter[call[call[name[df]][call[call[name[df]][constant[vocab_name]].notnull, parameter[]]]][constant[vocab_name]], call[name[df]][call[call[name[df]][constant[vocab_name]].notnull, parameter[]]].index]]]]
call[name[vocab_col_names].extend, parameter[name[lst]]]
variable[vocab_col_names] assign[=] call[name[sorted], parameter[call[name[set], parameter[name[vocab_col_names]]]]]
variable[bool_items] assign[=] list[[<ast.Dict object at 0x7da1b05748e0>, <ast.Dict object at 0x7da1b0574820>, <ast.Dict object at 0x7da1b0574130>, <ast.Dict object at 0x7da1b05740a0>, <ast.Dict object at 0x7da1b0575e10>, <ast.Dict object at 0x7da1b0575d20>, <ast.Dict object at 0x7da1b0575d50>, <ast.Dict object at 0x7da1b0575de0>, <ast.Dict object at 0x7da1b0575300>, <ast.Dict object at 0x7da1b0575ae0>, <ast.Dict object at 0x7da1b0575b40>, <ast.Dict object at 0x7da1b0575b70>]]
variable[series] assign[=] call[name[Series], parameter[dictionary[[<ast.Constant object at 0x7da1b05759f0>, <ast.Constant object at 0x7da1b0575570>], [<ast.Constant object at 0x7da1b05759c0>, <ast.Name object at 0x7da1b0575960>]]]]
call[name[data]][constant[boolean]] assign[=] name[series]
for taget[name[vocab]] in starred[call[name[vocab_col_names]][<ast.Slice object at 0x7da1b05fffd0>]] begin[:]
if compare[call[name[vocab]][constant[0]] equal[==] constant[magic_table_column]] begin[:]
call[name[vocab_col_names].remove, parameter[tuple[[<ast.Constant object at 0x7da1b05ffe20>, <ast.Constant object at 0x7da1b05ffdc0>]]]]
continue
variable[items] assign[=] call[call[name[data]][call[name[vocab]][constant[0]]]][constant[items]]
variable[stripped_list] assign[=] <ast.ListComp object at 0x7da1b05ff070>
call[name[controlled_vocabularies].append, parameter[name[stripped_list]]]
variable[ind_values] assign[=] <ast.ListComp object at 0x7da1b05ff2b0>
variable[vocabularies] assign[=] call[name[pd].Series, parameter[name[controlled_vocabularies]]]
return[name[vocabularies]] | keyword[def] identifier[get_controlled_vocabularies] ( identifier[self] , identifier[vocab_types] = identifier[default_vocab_types] ):
literal[string]
keyword[if] identifier[len] ( identifier[VOCAB] ):
identifier[self] . identifier[set_vocabularies] ()
keyword[return]
identifier[data] =[]
identifier[controlled_vocabularies] =[]
keyword[if] keyword[not] identifier[set_env] . identifier[OFFLINE] :
identifier[url] = literal[string]
keyword[try] :
identifier[raw] = identifier[self] . identifier[get_json_online] ( identifier[url] )
identifier[data] = identifier[pd] . identifier[DataFrame] ( identifier[raw] . identifier[json] ())
identifier[print] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[pass]
keyword[if] keyword[not] identifier[len] ( identifier[data] ):
identifier[print] ( literal[string] )
identifier[fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_model_dir] , literal[string] )
identifier[data] = identifier[pd] . identifier[io] . identifier[json] . identifier[read_json] ( identifier[fname] , identifier[encoding] = literal[string] )
identifier[possible_vocabularies] = identifier[data] . identifier[columns]
identifier[vocab_types] = identifier[list] ( identifier[possible_vocabularies] )
keyword[def] identifier[get_cv_from_list] ( identifier[lst] ):
literal[string]
keyword[try] :
keyword[for] identifier[i] keyword[in] identifier[lst] :
keyword[if] literal[string] keyword[in] identifier[i] :
keyword[return] identifier[i] [ literal[int] :- literal[int] ]
keyword[except] identifier[TypeError] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] keyword[None]
identifier[vocab_col_names] =[]
identifier[data_model] = identifier[self] . identifier[data_model]
keyword[for] identifier[dm_key] keyword[in] identifier[data_model] . identifier[dm] :
identifier[df] = identifier[data_model] . identifier[dm] [ identifier[dm_key] ]
identifier[df] [ literal[string] ]= identifier[df] [ literal[string] ]. identifier[apply] ( identifier[get_cv_from_list] )
identifier[lst] = identifier[list] ( identifier[zip] ( identifier[df] [ identifier[df] [ literal[string] ]. identifier[notnull] ()][ literal[string] ], identifier[df] [ identifier[df] [ literal[string] ]. identifier[notnull] ()]. identifier[index] ))
identifier[vocab_col_names] . identifier[extend] ( identifier[lst] )
identifier[vocab_col_names] = identifier[sorted] ( identifier[set] ( identifier[vocab_col_names] ))
identifier[bool_items] =[{ literal[string] : keyword[True] },{ literal[string] : keyword[False] },{ literal[string] : literal[string] },
{ literal[string] : literal[string] },{ literal[string] : literal[int] },{ literal[string] : literal[int] },
{ literal[string] : literal[int] },{ literal[string] : literal[int] },
{ literal[string] : literal[string] },{ literal[string] : literal[string] },
{ literal[string] : literal[string] },{ literal[string] : literal[string] }]
identifier[series] = identifier[Series] ({ literal[string] : literal[string] , literal[string] : identifier[bool_items] })
identifier[data] [ literal[string] ]= identifier[series]
keyword[for] identifier[vocab] keyword[in] identifier[vocab_col_names] [:]:
keyword[if] identifier[vocab] [ literal[int] ]== literal[string] :
identifier[vocab_col_names] . identifier[remove] (( literal[string] , literal[string] ))
keyword[continue]
identifier[items] = identifier[data] [ identifier[vocab] [ literal[int] ]][ literal[string] ]
identifier[stripped_list] =[ identifier[item] [ literal[string] ] keyword[for] identifier[item] keyword[in] identifier[items] ]
identifier[controlled_vocabularies] . identifier[append] ( identifier[stripped_list] )
identifier[ind_values] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[vocab_col_names] ]
identifier[vocabularies] = identifier[pd] . identifier[Series] ( identifier[controlled_vocabularies] , identifier[index] = identifier[ind_values] )
keyword[return] identifier[vocabularies] | def get_controlled_vocabularies(self, vocab_types=default_vocab_types):
"""
Get all non-method controlled vocabularies
"""
if len(VOCAB):
self.set_vocabularies()
return # depends on [control=['if'], data=[]]
data = []
controlled_vocabularies = []
# try to get online
if not set_env.OFFLINE:
url = 'https://www2.earthref.org/vocabularies/controlled.json'
try:
raw = self.get_json_online(url)
data = pd.DataFrame(raw.json())
print('-I- Importing controlled vocabularies from https://earthref.org') # depends on [control=['try'], data=[]]
except Exception as ex:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
#print(ex, type(ex))
# used cached
if not len(data):
print('-I- Using cached vocabularies')
fname = os.path.join(data_model_dir, 'controlled_vocabularies_December_10_2018.json')
data = pd.io.json.read_json(fname, encoding='utf-8-sig') # depends on [control=['if'], data=[]]
# parse data
possible_vocabularies = data.columns
## this line means, grab every single controlled vocabulary
vocab_types = list(possible_vocabularies)
def get_cv_from_list(lst):
"""
Check a validations list from the data model.
If there is a controlled vocabulary validation,
return which category of controlled vocabulary it is.
This will generally be applied to the validations col
of the data model
"""
try:
for i in lst:
if 'cv(' in i:
return i[4:-2] # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['i']] # depends on [control=['try'], data=[]]
except TypeError:
return None # depends on [control=['except'], data=[]]
else:
return None
vocab_col_names = []
data_model = self.data_model
for dm_key in data_model.dm:
df = data_model.dm[dm_key]
df['vocab_name'] = df['validations'].apply(get_cv_from_list)
lst = list(zip(df[df['vocab_name'].notnull()]['vocab_name'], df[df['vocab_name'].notnull()].index))
# in lst, first value is the name of the controlled vocabulary
# second value is the name of the dataframe column
vocab_col_names.extend(lst) # depends on [control=['for'], data=['dm_key']]
# vocab_col_names is now a list of tuples
# consisting of the vocabulary name and the column name
# i.e., (u'type', u'geologic_types')
# remove duplicate col_names:
vocab_col_names = sorted(set(vocab_col_names))
# add in boolean category to controlled vocabularies
bool_items = [{'item': True}, {'item': False}, {'item': 'true'}, {'item': 'false'}, {'item': 0}, {'item': 1}, {'item': 0.0}, {'item': 1.0}, {'item': 't'}, {'item': 'f'}, {'item': 'T'}, {'item': 'F'}]
series = Series({'label': 'Boolean', 'items': bool_items})
data['boolean'] = series
# use vocabulary name to get possible values for the column name
for vocab in vocab_col_names[:]:
if vocab[0] == 'magic_table_column':
vocab_col_names.remove(('magic_table_column', 'table_column'))
continue # depends on [control=['if'], data=[]]
items = data[vocab[0]]['items']
stripped_list = [item['item'] for item in items]
controlled_vocabularies.append(stripped_list) # depends on [control=['for'], data=['vocab']]
# create series with the column name as the index,
# and the possible values as the values
ind_values = [i[1] for i in vocab_col_names]
vocabularies = pd.Series(controlled_vocabularies, index=ind_values)
return vocabularies |
def __get_datadir(self, app=False):
"""
Draw a dialog for directory selection.
"""
# if 'datadir' in self.oseg.cache.data.keys():
# if :
# directory = self.oseg.input_datapath_start
if self.cache is not None:
cache_loaddir = self.cache.get_or_none('loaddir')
self.loaddir = str(cache_loaddir)
# self.loaddir = str(self.cache.get_or_none('loaddir'))
if self.loaddir is None:
self.loaddir = ''
directory = self.loaddir
from PyQt4.QtGui import QFileDialog
if not app:
app_inner = QApplication(sys.argv)
if self._skip_get_path_dialog_for_tests:
dcmdir = self.datapath
else:
dcmdir = QFileDialog.getExistingDirectory(
caption='Select DICOM Folder',
options=QFileDialog.ShowDirsOnly,
directory=directory
)
# pp.exec_()
if not app:
app_inner.exit(0)
dcmdir = get_str(dcmdir)
if len(dcmdir) > 0:
# dcmdir = "%s" % (dcmdir)
# dcmdir = dcmdir.encode("utf8")
pass
else:
dcmdir = None
if self.cache is not None:
self.cache.update('loaddir', dcmdir)
return str(dcmdir) | def function[__get_datadir, parameter[self, app]]:
constant[
Draw a dialog for directory selection.
]
if compare[name[self].cache is_not constant[None]] begin[:]
variable[cache_loaddir] assign[=] call[name[self].cache.get_or_none, parameter[constant[loaddir]]]
name[self].loaddir assign[=] call[name[str], parameter[name[cache_loaddir]]]
if compare[name[self].loaddir is constant[None]] begin[:]
name[self].loaddir assign[=] constant[]
variable[directory] assign[=] name[self].loaddir
from relative_module[PyQt4.QtGui] import module[QFileDialog]
if <ast.UnaryOp object at 0x7da18dc99810> begin[:]
variable[app_inner] assign[=] call[name[QApplication], parameter[name[sys].argv]]
if name[self]._skip_get_path_dialog_for_tests begin[:]
variable[dcmdir] assign[=] name[self].datapath
if <ast.UnaryOp object at 0x7da18dc981f0> begin[:]
call[name[app_inner].exit, parameter[constant[0]]]
variable[dcmdir] assign[=] call[name[get_str], parameter[name[dcmdir]]]
if compare[call[name[len], parameter[name[dcmdir]]] greater[>] constant[0]] begin[:]
pass
if compare[name[self].cache is_not constant[None]] begin[:]
call[name[self].cache.update, parameter[constant[loaddir], name[dcmdir]]]
return[call[name[str], parameter[name[dcmdir]]]] | keyword[def] identifier[__get_datadir] ( identifier[self] , identifier[app] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[cache] keyword[is] keyword[not] keyword[None] :
identifier[cache_loaddir] = identifier[self] . identifier[cache] . identifier[get_or_none] ( literal[string] )
identifier[self] . identifier[loaddir] = identifier[str] ( identifier[cache_loaddir] )
keyword[if] identifier[self] . identifier[loaddir] keyword[is] keyword[None] :
identifier[self] . identifier[loaddir] = literal[string]
identifier[directory] = identifier[self] . identifier[loaddir]
keyword[from] identifier[PyQt4] . identifier[QtGui] keyword[import] identifier[QFileDialog]
keyword[if] keyword[not] identifier[app] :
identifier[app_inner] = identifier[QApplication] ( identifier[sys] . identifier[argv] )
keyword[if] identifier[self] . identifier[_skip_get_path_dialog_for_tests] :
identifier[dcmdir] = identifier[self] . identifier[datapath]
keyword[else] :
identifier[dcmdir] = identifier[QFileDialog] . identifier[getExistingDirectory] (
identifier[caption] = literal[string] ,
identifier[options] = identifier[QFileDialog] . identifier[ShowDirsOnly] ,
identifier[directory] = identifier[directory]
)
keyword[if] keyword[not] identifier[app] :
identifier[app_inner] . identifier[exit] ( literal[int] )
identifier[dcmdir] = identifier[get_str] ( identifier[dcmdir] )
keyword[if] identifier[len] ( identifier[dcmdir] )> literal[int] :
keyword[pass]
keyword[else] :
identifier[dcmdir] = keyword[None]
keyword[if] identifier[self] . identifier[cache] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[cache] . identifier[update] ( literal[string] , identifier[dcmdir] )
keyword[return] identifier[str] ( identifier[dcmdir] ) | def __get_datadir(self, app=False):
"""
Draw a dialog for directory selection.
"""
# if 'datadir' in self.oseg.cache.data.keys():
# if :
# directory = self.oseg.input_datapath_start
if self.cache is not None:
cache_loaddir = self.cache.get_or_none('loaddir')
self.loaddir = str(cache_loaddir) # depends on [control=['if'], data=[]]
# self.loaddir = str(self.cache.get_or_none('loaddir'))
if self.loaddir is None:
self.loaddir = '' # depends on [control=['if'], data=[]]
directory = self.loaddir
from PyQt4.QtGui import QFileDialog
if not app:
app_inner = QApplication(sys.argv) # depends on [control=['if'], data=[]]
if self._skip_get_path_dialog_for_tests:
dcmdir = self.datapath # depends on [control=['if'], data=[]]
else:
dcmdir = QFileDialog.getExistingDirectory(caption='Select DICOM Folder', options=QFileDialog.ShowDirsOnly, directory=directory)
# pp.exec_()
if not app:
app_inner.exit(0) # depends on [control=['if'], data=[]]
dcmdir = get_str(dcmdir)
if len(dcmdir) > 0:
# dcmdir = "%s" % (dcmdir)
# dcmdir = dcmdir.encode("utf8")
pass # depends on [control=['if'], data=[]]
else:
dcmdir = None
if self.cache is not None:
self.cache.update('loaddir', dcmdir) # depends on [control=['if'], data=[]]
return str(dcmdir) |
def clear(self):
"""Clears this instance's cache."""
if self._cache is not None:
with self._cache as c:
c.clear()
c.out_deque.clear() | def function[clear, parameter[self]]:
constant[Clears this instance's cache.]
if compare[name[self]._cache is_not constant[None]] begin[:]
with name[self]._cache begin[:]
call[name[c].clear, parameter[]]
call[name[c].out_deque.clear, parameter[]] | keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_cache] keyword[is] keyword[not] keyword[None] :
keyword[with] identifier[self] . identifier[_cache] keyword[as] identifier[c] :
identifier[c] . identifier[clear] ()
identifier[c] . identifier[out_deque] . identifier[clear] () | def clear(self):
"""Clears this instance's cache."""
if self._cache is not None:
with self._cache as c:
c.clear()
c.out_deque.clear() # depends on [control=['with'], data=['c']] # depends on [control=['if'], data=[]] |
def copy(self, deep=True, data=None):
"""Returns a copy of this object.
`deep` is ignored since data is stored in the form of
pandas.Index, which is already immutable. Dimensions, attributes
and encodings are always copied.
Use `data` to create a new object with the same structure as
original but entirely new data.
Parameters
----------
deep : bool, optional
Deep is always ignored.
data : array_like, optional
Data to use in the new object. Must have same shape as original.
Returns
-------
object : Variable
New object with dimensions, attributes, encodings, and optionally
data copied from original.
"""
if data is None:
data = self._data
else:
data = as_compatible_data(data)
if self.shape != data.shape:
raise ValueError("Data shape {} must match shape of object {}"
.format(data.shape, self.shape))
return type(self)(self.dims, data, self._attrs,
self._encoding, fastpath=True) | def function[copy, parameter[self, deep, data]]:
constant[Returns a copy of this object.
`deep` is ignored since data is stored in the form of
pandas.Index, which is already immutable. Dimensions, attributes
and encodings are always copied.
Use `data` to create a new object with the same structure as
original but entirely new data.
Parameters
----------
deep : bool, optional
Deep is always ignored.
data : array_like, optional
Data to use in the new object. Must have same shape as original.
Returns
-------
object : Variable
New object with dimensions, attributes, encodings, and optionally
data copied from original.
]
if compare[name[data] is constant[None]] begin[:]
variable[data] assign[=] name[self]._data
return[call[call[name[type], parameter[name[self]]], parameter[name[self].dims, name[data], name[self]._attrs, name[self]._encoding]]] | keyword[def] identifier[copy] ( identifier[self] , identifier[deep] = keyword[True] , identifier[data] = keyword[None] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] = identifier[self] . identifier[_data]
keyword[else] :
identifier[data] = identifier[as_compatible_data] ( identifier[data] )
keyword[if] identifier[self] . identifier[shape] != identifier[data] . identifier[shape] :
keyword[raise] identifier[ValueError] ( literal[string]
. identifier[format] ( identifier[data] . identifier[shape] , identifier[self] . identifier[shape] ))
keyword[return] identifier[type] ( identifier[self] )( identifier[self] . identifier[dims] , identifier[data] , identifier[self] . identifier[_attrs] ,
identifier[self] . identifier[_encoding] , identifier[fastpath] = keyword[True] ) | def copy(self, deep=True, data=None):
"""Returns a copy of this object.
`deep` is ignored since data is stored in the form of
pandas.Index, which is already immutable. Dimensions, attributes
and encodings are always copied.
Use `data` to create a new object with the same structure as
original but entirely new data.
Parameters
----------
deep : bool, optional
Deep is always ignored.
data : array_like, optional
Data to use in the new object. Must have same shape as original.
Returns
-------
object : Variable
New object with dimensions, attributes, encodings, and optionally
data copied from original.
"""
if data is None:
data = self._data # depends on [control=['if'], data=['data']]
else:
data = as_compatible_data(data)
if self.shape != data.shape:
raise ValueError('Data shape {} must match shape of object {}'.format(data.shape, self.shape)) # depends on [control=['if'], data=[]]
return type(self)(self.dims, data, self._attrs, self._encoding, fastpath=True) |
def get_client_rect(self):
"""
Get the window's client area coordinates in the desktop.
@rtype: L{win32.Rect}
@return: Rectangle occupied by the window's client area in the desktop.
@raise WindowsError: An error occured while processing this request.
"""
cr = win32.GetClientRect( self.get_handle() )
cr.left, cr.top = self.client_to_screen(cr.left, cr.top)
cr.right, cr.bottom = self.client_to_screen(cr.right, cr.bottom)
return cr | def function[get_client_rect, parameter[self]]:
constant[
Get the window's client area coordinates in the desktop.
@rtype: L{win32.Rect}
@return: Rectangle occupied by the window's client area in the desktop.
@raise WindowsError: An error occured while processing this request.
]
variable[cr] assign[=] call[name[win32].GetClientRect, parameter[call[name[self].get_handle, parameter[]]]]
<ast.Tuple object at 0x7da1b08dac20> assign[=] call[name[self].client_to_screen, parameter[name[cr].left, name[cr].top]]
<ast.Tuple object at 0x7da1b08db3d0> assign[=] call[name[self].client_to_screen, parameter[name[cr].right, name[cr].bottom]]
return[name[cr]] | keyword[def] identifier[get_client_rect] ( identifier[self] ):
literal[string]
identifier[cr] = identifier[win32] . identifier[GetClientRect] ( identifier[self] . identifier[get_handle] ())
identifier[cr] . identifier[left] , identifier[cr] . identifier[top] = identifier[self] . identifier[client_to_screen] ( identifier[cr] . identifier[left] , identifier[cr] . identifier[top] )
identifier[cr] . identifier[right] , identifier[cr] . identifier[bottom] = identifier[self] . identifier[client_to_screen] ( identifier[cr] . identifier[right] , identifier[cr] . identifier[bottom] )
keyword[return] identifier[cr] | def get_client_rect(self):
"""
Get the window's client area coordinates in the desktop.
@rtype: L{win32.Rect}
@return: Rectangle occupied by the window's client area in the desktop.
@raise WindowsError: An error occured while processing this request.
"""
cr = win32.GetClientRect(self.get_handle())
(cr.left, cr.top) = self.client_to_screen(cr.left, cr.top)
(cr.right, cr.bottom) = self.client_to_screen(cr.right, cr.bottom)
return cr |
def _get_event_type(evt_source):
"""Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
"""
if 'schedule' in evt_source:
return 'events'
elif 'pattern' in evt_source:
return 'events'
elif 'log_group_name_prefix' in evt_source:
return 'cloudwatch_logs'
else:
arn = evt_source['arn']
_, _, svc, _ = arn.split(':', 3)
return svc | def function[_get_event_type, parameter[evt_source]]:
constant[Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
]
if compare[constant[schedule] in name[evt_source]] begin[:]
return[constant[events]] | keyword[def] identifier[_get_event_type] ( identifier[evt_source] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[evt_source] :
keyword[return] literal[string]
keyword[elif] literal[string] keyword[in] identifier[evt_source] :
keyword[return] literal[string]
keyword[elif] literal[string] keyword[in] identifier[evt_source] :
keyword[return] literal[string]
keyword[else] :
identifier[arn] = identifier[evt_source] [ literal[string] ]
identifier[_] , identifier[_] , identifier[svc] , identifier[_] = identifier[arn] . identifier[split] ( literal[string] , literal[int] )
keyword[return] identifier[svc] | def _get_event_type(evt_source):
"""Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
"""
if 'schedule' in evt_source:
return 'events' # depends on [control=['if'], data=[]]
elif 'pattern' in evt_source:
return 'events' # depends on [control=['if'], data=[]]
elif 'log_group_name_prefix' in evt_source:
return 'cloudwatch_logs' # depends on [control=['if'], data=[]]
else:
arn = evt_source['arn']
(_, _, svc, _) = arn.split(':', 3)
return svc |
async def _sasl_abort(self, timeout=False):
""" Abort SASL authentication. """
if timeout:
self.logger.error('SASL authentication timed out: aborting.')
else:
self.logger.error('SASL authentication aborted.')
if self._sasl_timer:
self._sasl_timer.cancel()
self._sasl_timer = None
# We're done here.
await self.rawmsg('AUTHENTICATE', ABORT_MESSAGE)
await self._capability_negotiated('sasl') | <ast.AsyncFunctionDef object at 0x7da2054a5c30> | keyword[async] keyword[def] identifier[_sasl_abort] ( identifier[self] , identifier[timeout] = keyword[False] ):
literal[string]
keyword[if] identifier[timeout] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] )
keyword[else] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] )
keyword[if] identifier[self] . identifier[_sasl_timer] :
identifier[self] . identifier[_sasl_timer] . identifier[cancel] ()
identifier[self] . identifier[_sasl_timer] = keyword[None]
keyword[await] identifier[self] . identifier[rawmsg] ( literal[string] , identifier[ABORT_MESSAGE] )
keyword[await] identifier[self] . identifier[_capability_negotiated] ( literal[string] ) | async def _sasl_abort(self, timeout=False):
""" Abort SASL authentication. """
if timeout:
self.logger.error('SASL authentication timed out: aborting.') # depends on [control=['if'], data=[]]
else:
self.logger.error('SASL authentication aborted.')
if self._sasl_timer:
self._sasl_timer.cancel()
self._sasl_timer = None # depends on [control=['if'], data=[]]
# We're done here.
await self.rawmsg('AUTHENTICATE', ABORT_MESSAGE)
await self._capability_negotiated('sasl') |
def from_application_endpoint(cls, application_endpoint: str):
"""Initializes a new instance of the <see cref="LuisApplication"/> class.
:param application_endpoint: LUIS application endpoint.
:type application_endpoint: str
:return:
:rtype: LuisApplication
"""
(application_id, endpoint_key, endpoint) = LuisApplication._parse(
application_endpoint
)
return cls(application_id, endpoint_key, endpoint) | def function[from_application_endpoint, parameter[cls, application_endpoint]]:
constant[Initializes a new instance of the <see cref="LuisApplication"/> class.
:param application_endpoint: LUIS application endpoint.
:type application_endpoint: str
:return:
:rtype: LuisApplication
]
<ast.Tuple object at 0x7da207f98040> assign[=] call[name[LuisApplication]._parse, parameter[name[application_endpoint]]]
return[call[name[cls], parameter[name[application_id], name[endpoint_key], name[endpoint]]]] | keyword[def] identifier[from_application_endpoint] ( identifier[cls] , identifier[application_endpoint] : identifier[str] ):
literal[string]
( identifier[application_id] , identifier[endpoint_key] , identifier[endpoint] )= identifier[LuisApplication] . identifier[_parse] (
identifier[application_endpoint]
)
keyword[return] identifier[cls] ( identifier[application_id] , identifier[endpoint_key] , identifier[endpoint] ) | def from_application_endpoint(cls, application_endpoint: str):
"""Initializes a new instance of the <see cref="LuisApplication"/> class.
:param application_endpoint: LUIS application endpoint.
:type application_endpoint: str
:return:
:rtype: LuisApplication
"""
(application_id, endpoint_key, endpoint) = LuisApplication._parse(application_endpoint)
return cls(application_id, endpoint_key, endpoint) |
def render_js_code(self, id_, *args, **kwargs):
"""Render html container for Select2 widget with options."""
if id_:
options = self.render_select2_options_code(
dict(self.get_options()), id_)
return mark_safe(self.html.format(id=id_, options=options))
return u'' | def function[render_js_code, parameter[self, id_]]:
constant[Render html container for Select2 widget with options.]
if name[id_] begin[:]
variable[options] assign[=] call[name[self].render_select2_options_code, parameter[call[name[dict], parameter[call[name[self].get_options, parameter[]]]], name[id_]]]
return[call[name[mark_safe], parameter[call[name[self].html.format, parameter[]]]]]
return[constant[]] | keyword[def] identifier[render_js_code] ( identifier[self] , identifier[id_] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[id_] :
identifier[options] = identifier[self] . identifier[render_select2_options_code] (
identifier[dict] ( identifier[self] . identifier[get_options] ()), identifier[id_] )
keyword[return] identifier[mark_safe] ( identifier[self] . identifier[html] . identifier[format] ( identifier[id] = identifier[id_] , identifier[options] = identifier[options] ))
keyword[return] literal[string] | def render_js_code(self, id_, *args, **kwargs):
"""Render html container for Select2 widget with options."""
if id_:
options = self.render_select2_options_code(dict(self.get_options()), id_)
return mark_safe(self.html.format(id=id_, options=options)) # depends on [control=['if'], data=[]]
return u'' |
def switch_on(self, *args):
"""
Sets the state of the switch to True if on_check() returns True,
given the arguments provided in kwargs.
:param kwargs: variable length dictionary of key-pair arguments
:return: Boolean. Returns True if the operation is successful
"""
if self.on_check(*args):
return self._switch.switch(True)
else:
return False | def function[switch_on, parameter[self]]:
constant[
Sets the state of the switch to True if on_check() returns True,
given the arguments provided in kwargs.
:param kwargs: variable length dictionary of key-pair arguments
:return: Boolean. Returns True if the operation is successful
]
if call[name[self].on_check, parameter[<ast.Starred object at 0x7da1b09bf760>]] begin[:]
return[call[name[self]._switch.switch, parameter[constant[True]]]] | keyword[def] identifier[switch_on] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[on_check] (* identifier[args] ):
keyword[return] identifier[self] . identifier[_switch] . identifier[switch] ( keyword[True] )
keyword[else] :
keyword[return] keyword[False] | def switch_on(self, *args):
"""
Sets the state of the switch to True if on_check() returns True,
given the arguments provided in kwargs.
:param kwargs: variable length dictionary of key-pair arguments
:return: Boolean. Returns True if the operation is successful
"""
if self.on_check(*args):
return self._switch.switch(True) # depends on [control=['if'], data=[]]
else:
return False |
def all_label_values(self, label_list_ids=None):
"""
Return a set of all label-values occurring in this utterance.
Args:
label_list_ids (list): If not None, only label-values from
label-lists with an id contained in this list
are considered.
Returns:
:class:`set`: A set of distinct label-values.
"""
values = set()
for label_list in self.label_lists.values():
if label_list_ids is None or label_list.idx in label_list_ids:
values = values.union(label_list.label_values())
return values | def function[all_label_values, parameter[self, label_list_ids]]:
constant[
Return a set of all label-values occurring in this utterance.
Args:
label_list_ids (list): If not None, only label-values from
label-lists with an id contained in this list
are considered.
Returns:
:class:`set`: A set of distinct label-values.
]
variable[values] assign[=] call[name[set], parameter[]]
for taget[name[label_list]] in starred[call[name[self].label_lists.values, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0ed31f0> begin[:]
variable[values] assign[=] call[name[values].union, parameter[call[name[label_list].label_values, parameter[]]]]
return[name[values]] | keyword[def] identifier[all_label_values] ( identifier[self] , identifier[label_list_ids] = keyword[None] ):
literal[string]
identifier[values] = identifier[set] ()
keyword[for] identifier[label_list] keyword[in] identifier[self] . identifier[label_lists] . identifier[values] ():
keyword[if] identifier[label_list_ids] keyword[is] keyword[None] keyword[or] identifier[label_list] . identifier[idx] keyword[in] identifier[label_list_ids] :
identifier[values] = identifier[values] . identifier[union] ( identifier[label_list] . identifier[label_values] ())
keyword[return] identifier[values] | def all_label_values(self, label_list_ids=None):
"""
Return a set of all label-values occurring in this utterance.
Args:
label_list_ids (list): If not None, only label-values from
label-lists with an id contained in this list
are considered.
Returns:
:class:`set`: A set of distinct label-values.
"""
values = set()
for label_list in self.label_lists.values():
if label_list_ids is None or label_list.idx in label_list_ids:
values = values.union(label_list.label_values()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['label_list']]
return values |
def hpre(*content, sep='\n'):
"""
Make mono-width text block (HTML)
:param content:
:param sep:
:return:
"""
return _md(quote_html(_join(*content, sep=sep)), symbols=MD_SYMBOLS[7]) | def function[hpre, parameter[]]:
constant[
Make mono-width text block (HTML)
:param content:
:param sep:
:return:
]
return[call[name[_md], parameter[call[name[quote_html], parameter[call[name[_join], parameter[<ast.Starred object at 0x7da1b17b8700>]]]]]]] | keyword[def] identifier[hpre] (* identifier[content] , identifier[sep] = literal[string] ):
literal[string]
keyword[return] identifier[_md] ( identifier[quote_html] ( identifier[_join] (* identifier[content] , identifier[sep] = identifier[sep] )), identifier[symbols] = identifier[MD_SYMBOLS] [ literal[int] ]) | def hpre(*content, sep='\n'):
"""
Make mono-width text block (HTML)
:param content:
:param sep:
:return:
"""
return _md(quote_html(_join(*content, sep=sep)), symbols=MD_SYMBOLS[7]) |
def validate_cmd_response_hex(name, got, expected):
"""
Check that some value returned in the response to a command matches what
we put in the request (the command).
"""
if got != expected:
raise(pyhsm.exception.YHSM_Error("Bad %s in response (got 0x%x, expected 0x%x)" \
% (name, got, expected)))
return got | def function[validate_cmd_response_hex, parameter[name, got, expected]]:
constant[
Check that some value returned in the response to a command matches what
we put in the request (the command).
]
if compare[name[got] not_equal[!=] name[expected]] begin[:]
<ast.Raise object at 0x7da20cabdf90>
return[name[got]] | keyword[def] identifier[validate_cmd_response_hex] ( identifier[name] , identifier[got] , identifier[expected] ):
literal[string]
keyword[if] identifier[got] != identifier[expected] :
keyword[raise] ( identifier[pyhsm] . identifier[exception] . identifier[YHSM_Error] ( literal[string] %( identifier[name] , identifier[got] , identifier[expected] )))
keyword[return] identifier[got] | def validate_cmd_response_hex(name, got, expected):
"""
Check that some value returned in the response to a command matches what
we put in the request (the command).
"""
if got != expected:
raise pyhsm.exception.YHSM_Error('Bad %s in response (got 0x%x, expected 0x%x)' % (name, got, expected)) # depends on [control=['if'], data=['got', 'expected']]
return got |
def createTempFile(self, suffix=None, prefix=None, secure=True) -> 'File':
""" Create File
Creates a new file within the directory with a temporary file like name.
@return: Created file.
"""
if not secure:
raise NotImplementedError("We only support secure files at this point")
# tempfile.mkstemp(suffix=None, prefix=None, dir=None, text=False)
newFileNum, newFileRealPath = tempfile.mkstemp(
suffix=suffix, prefix=prefix, dir=self._path)
os.close(newFileNum)
relativePath = os.path.relpath(newFileRealPath, self.path)
file = File(self, pathName=relativePath, exists=True)
self._files[file.pathName] = file
return file | def function[createTempFile, parameter[self, suffix, prefix, secure]]:
constant[ Create File
Creates a new file within the directory with a temporary file like name.
@return: Created file.
]
if <ast.UnaryOp object at 0x7da204961600> begin[:]
<ast.Raise object at 0x7da204963bb0>
<ast.Tuple object at 0x7da204963a00> assign[=] call[name[tempfile].mkstemp, parameter[]]
call[name[os].close, parameter[name[newFileNum]]]
variable[relativePath] assign[=] call[name[os].path.relpath, parameter[name[newFileRealPath], name[self].path]]
variable[file] assign[=] call[name[File], parameter[name[self]]]
call[name[self]._files][name[file].pathName] assign[=] name[file]
return[name[file]] | keyword[def] identifier[createTempFile] ( identifier[self] , identifier[suffix] = keyword[None] , identifier[prefix] = keyword[None] , identifier[secure] = keyword[True] )-> literal[string] :
literal[string]
keyword[if] keyword[not] identifier[secure] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[newFileNum] , identifier[newFileRealPath] = identifier[tempfile] . identifier[mkstemp] (
identifier[suffix] = identifier[suffix] , identifier[prefix] = identifier[prefix] , identifier[dir] = identifier[self] . identifier[_path] )
identifier[os] . identifier[close] ( identifier[newFileNum] )
identifier[relativePath] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[newFileRealPath] , identifier[self] . identifier[path] )
identifier[file] = identifier[File] ( identifier[self] , identifier[pathName] = identifier[relativePath] , identifier[exists] = keyword[True] )
identifier[self] . identifier[_files] [ identifier[file] . identifier[pathName] ]= identifier[file]
keyword[return] identifier[file] | def createTempFile(self, suffix=None, prefix=None, secure=True) -> 'File':
""" Create File
Creates a new file within the directory with a temporary file like name.
@return: Created file.
"""
if not secure:
raise NotImplementedError('We only support secure files at this point') # depends on [control=['if'], data=[]]
# tempfile.mkstemp(suffix=None, prefix=None, dir=None, text=False)
(newFileNum, newFileRealPath) = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=self._path)
os.close(newFileNum)
relativePath = os.path.relpath(newFileRealPath, self.path)
file = File(self, pathName=relativePath, exists=True)
self._files[file.pathName] = file
return file |
def add_stylesheets(self, *css_files):
"""add stylesheet files in HTML head"""
for css_file in css_files:
self.main_soup.style.append(self._text_file(css_file)) | def function[add_stylesheets, parameter[self]]:
constant[add stylesheet files in HTML head]
for taget[name[css_file]] in starred[name[css_files]] begin[:]
call[name[self].main_soup.style.append, parameter[call[name[self]._text_file, parameter[name[css_file]]]]] | keyword[def] identifier[add_stylesheets] ( identifier[self] ,* identifier[css_files] ):
literal[string]
keyword[for] identifier[css_file] keyword[in] identifier[css_files] :
identifier[self] . identifier[main_soup] . identifier[style] . identifier[append] ( identifier[self] . identifier[_text_file] ( identifier[css_file] )) | def add_stylesheets(self, *css_files):
"""add stylesheet files in HTML head"""
for css_file in css_files:
self.main_soup.style.append(self._text_file(css_file)) # depends on [control=['for'], data=['css_file']] |
def remove_tag(tag_name, string):
"""
Remove open and close tags - the tags themselves only - using
a non-greedy angle bracket pattern match
"""
if not string:
return string
pattern = re.compile('</?' + tag_name + '.*?>')
string = pattern.sub('', string)
return string | def function[remove_tag, parameter[tag_name, string]]:
constant[
Remove open and close tags - the tags themselves only - using
a non-greedy angle bracket pattern match
]
if <ast.UnaryOp object at 0x7da1b168dc30> begin[:]
return[name[string]]
variable[pattern] assign[=] call[name[re].compile, parameter[binary_operation[binary_operation[constant[</?] + name[tag_name]] + constant[.*?>]]]]
variable[string] assign[=] call[name[pattern].sub, parameter[constant[], name[string]]]
return[name[string]] | keyword[def] identifier[remove_tag] ( identifier[tag_name] , identifier[string] ):
literal[string]
keyword[if] keyword[not] identifier[string] :
keyword[return] identifier[string]
identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] + identifier[tag_name] + literal[string] )
identifier[string] = identifier[pattern] . identifier[sub] ( literal[string] , identifier[string] )
keyword[return] identifier[string] | def remove_tag(tag_name, string):
"""
Remove open and close tags - the tags themselves only - using
a non-greedy angle bracket pattern match
"""
if not string:
return string # depends on [control=['if'], data=[]]
pattern = re.compile('</?' + tag_name + '.*?>')
string = pattern.sub('', string)
return string |
def OneHot0(*xs, simplify=True, conj=True):
"""
Return an expression that means
"at most one input function is true".
If *simplify* is ``True``, return a simplified expression.
If *conj* is ``True``, return a CNF.
Otherwise, return a DNF.
"""
xs = [Expression.box(x).node for x in xs]
terms = list()
if conj:
for x0, x1 in itertools.combinations(xs, 2):
terms.append(exprnode.or_(exprnode.not_(x0),
exprnode.not_(x1)))
y = exprnode.and_(*terms)
else:
for _xs in itertools.combinations(xs, len(xs) - 1):
terms.append(exprnode.and_(*[exprnode.not_(x) for x in _xs]))
y = exprnode.or_(*terms)
if simplify:
y = y.simplify()
return _expr(y) | def function[OneHot0, parameter[]]:
constant[
Return an expression that means
"at most one input function is true".
If *simplify* is ``True``, return a simplified expression.
If *conj* is ``True``, return a CNF.
Otherwise, return a DNF.
]
variable[xs] assign[=] <ast.ListComp object at 0x7da1b0d0e740>
variable[terms] assign[=] call[name[list], parameter[]]
if name[conj] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0ebc4c0>, <ast.Name object at 0x7da1b0ebcfa0>]]] in starred[call[name[itertools].combinations, parameter[name[xs], constant[2]]]] begin[:]
call[name[terms].append, parameter[call[name[exprnode].or_, parameter[call[name[exprnode].not_, parameter[name[x0]]], call[name[exprnode].not_, parameter[name[x1]]]]]]]
variable[y] assign[=] call[name[exprnode].and_, parameter[<ast.Starred object at 0x7da1b0ebc640>]]
if name[simplify] begin[:]
variable[y] assign[=] call[name[y].simplify, parameter[]]
return[call[name[_expr], parameter[name[y]]]] | keyword[def] identifier[OneHot0] (* identifier[xs] , identifier[simplify] = keyword[True] , identifier[conj] = keyword[True] ):
literal[string]
identifier[xs] =[ identifier[Expression] . identifier[box] ( identifier[x] ). identifier[node] keyword[for] identifier[x] keyword[in] identifier[xs] ]
identifier[terms] = identifier[list] ()
keyword[if] identifier[conj] :
keyword[for] identifier[x0] , identifier[x1] keyword[in] identifier[itertools] . identifier[combinations] ( identifier[xs] , literal[int] ):
identifier[terms] . identifier[append] ( identifier[exprnode] . identifier[or_] ( identifier[exprnode] . identifier[not_] ( identifier[x0] ),
identifier[exprnode] . identifier[not_] ( identifier[x1] )))
identifier[y] = identifier[exprnode] . identifier[and_] (* identifier[terms] )
keyword[else] :
keyword[for] identifier[_xs] keyword[in] identifier[itertools] . identifier[combinations] ( identifier[xs] , identifier[len] ( identifier[xs] )- literal[int] ):
identifier[terms] . identifier[append] ( identifier[exprnode] . identifier[and_] (*[ identifier[exprnode] . identifier[not_] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[_xs] ]))
identifier[y] = identifier[exprnode] . identifier[or_] (* identifier[terms] )
keyword[if] identifier[simplify] :
identifier[y] = identifier[y] . identifier[simplify] ()
keyword[return] identifier[_expr] ( identifier[y] ) | def OneHot0(*xs, simplify=True, conj=True):
"""
Return an expression that means
"at most one input function is true".
If *simplify* is ``True``, return a simplified expression.
If *conj* is ``True``, return a CNF.
Otherwise, return a DNF.
"""
xs = [Expression.box(x).node for x in xs]
terms = list()
if conj:
for (x0, x1) in itertools.combinations(xs, 2):
terms.append(exprnode.or_(exprnode.not_(x0), exprnode.not_(x1))) # depends on [control=['for'], data=[]]
y = exprnode.and_(*terms) # depends on [control=['if'], data=[]]
else:
for _xs in itertools.combinations(xs, len(xs) - 1):
terms.append(exprnode.and_(*[exprnode.not_(x) for x in _xs])) # depends on [control=['for'], data=['_xs']]
y = exprnode.or_(*terms)
if simplify:
y = y.simplify() # depends on [control=['if'], data=[]]
return _expr(y) |
def raw(config): # pragma: no cover
"""Dump the contents of LDAP to console in raw format."""
client = Client()
client.prepare_connection()
audit_api = API(client)
print(audit_api.raw()) | def function[raw, parameter[config]]:
constant[Dump the contents of LDAP to console in raw format.]
variable[client] assign[=] call[name[Client], parameter[]]
call[name[client].prepare_connection, parameter[]]
variable[audit_api] assign[=] call[name[API], parameter[name[client]]]
call[name[print], parameter[call[name[audit_api].raw, parameter[]]]] | keyword[def] identifier[raw] ( identifier[config] ):
literal[string]
identifier[client] = identifier[Client] ()
identifier[client] . identifier[prepare_connection] ()
identifier[audit_api] = identifier[API] ( identifier[client] )
identifier[print] ( identifier[audit_api] . identifier[raw] ()) | def raw(config): # pragma: no cover
'Dump the contents of LDAP to console in raw format.'
client = Client()
client.prepare_connection()
audit_api = API(client)
print(audit_api.raw()) |
def pareto_nbd_model(T, r, alpha, s, beta, size=1):
"""
Generate artificial data according to the Pareto/NBD model.
See [2]_ for model details.
Parameters
----------
T: array_like
The length of time observing new customers.
r, alpha, s, beta: float
Parameters in the model. See [1]_
size: int, optional
The number of customers to generate
Returns
-------
:obj: DataFrame
with index as customer_ids and the following columns:
'frequency', 'recency', 'T', 'lambda', 'mu', 'alive', 'customer_id'
References
----------
.. [2]: Fader, Peter S. and Bruce G. S. Hardie (2005), "A Note on Deriving the Pareto/NBD Model
and Related Expressions," <http://brucehardie.com/notes/009/>.
"""
if type(T) in [float, int]:
T = T * np.ones(size)
else:
T = np.asarray(T)
lambda_ = random.gamma(r, scale=1.0 / alpha, size=size)
mus = random.gamma(s, scale=1.0 / beta, size=size)
columns = ["frequency", "recency", "T", "lambda", "mu", "alive", "customer_id"]
df = pd.DataFrame(np.zeros((size, len(columns))), columns=columns)
for i in range(size):
l = lambda_[i]
mu = mus[i]
time_of_death = random.exponential(scale=1.0 / mu)
# hacky until I can find something better
times = []
next_purchase_in = random.exponential(scale=1.0 / l)
while np.sum(times) + next_purchase_in < min(time_of_death, T[i]):
times.append(next_purchase_in)
next_purchase_in = random.exponential(scale=1.0 / l)
times = np.array(times).cumsum()
df.iloc[i] = (
np.unique(np.array(times).astype(int)).shape[0],
np.max(times if times.shape[0] > 0 else 0),
T[i],
l,
mu,
time_of_death > T[i],
i,
)
return df.set_index("customer_id") | def function[pareto_nbd_model, parameter[T, r, alpha, s, beta, size]]:
constant[
Generate artificial data according to the Pareto/NBD model.
See [2]_ for model details.
Parameters
----------
T: array_like
The length of time observing new customers.
r, alpha, s, beta: float
Parameters in the model. See [1]_
size: int, optional
The number of customers to generate
Returns
-------
:obj: DataFrame
with index as customer_ids and the following columns:
'frequency', 'recency', 'T', 'lambda', 'mu', 'alive', 'customer_id'
References
----------
.. [2]: Fader, Peter S. and Bruce G. S. Hardie (2005), "A Note on Deriving the Pareto/NBD Model
and Related Expressions," <http://brucehardie.com/notes/009/>.
]
if compare[call[name[type], parameter[name[T]]] in list[[<ast.Name object at 0x7da1b1d0f7f0>, <ast.Name object at 0x7da1b1d0f820>]]] begin[:]
variable[T] assign[=] binary_operation[name[T] * call[name[np].ones, parameter[name[size]]]]
variable[lambda_] assign[=] call[name[random].gamma, parameter[name[r]]]
variable[mus] assign[=] call[name[random].gamma, parameter[name[s]]]
variable[columns] assign[=] list[[<ast.Constant object at 0x7da1b1d0e410>, <ast.Constant object at 0x7da1b1d0e470>, <ast.Constant object at 0x7da1b1d0e350>, <ast.Constant object at 0x7da1b1d0e320>, <ast.Constant object at 0x7da1b1d0e380>, <ast.Constant object at 0x7da1b1d0e4a0>, <ast.Constant object at 0x7da1b1d0e2c0>]]
variable[df] assign[=] call[name[pd].DataFrame, parameter[call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1d0e1d0>, <ast.Call object at 0x7da1b1d0e0e0>]]]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[size]]]] begin[:]
variable[l] assign[=] call[name[lambda_]][name[i]]
variable[mu] assign[=] call[name[mus]][name[i]]
variable[time_of_death] assign[=] call[name[random].exponential, parameter[]]
variable[times] assign[=] list[[]]
variable[next_purchase_in] assign[=] call[name[random].exponential, parameter[]]
while compare[binary_operation[call[name[np].sum, parameter[name[times]]] + name[next_purchase_in]] less[<] call[name[min], parameter[name[time_of_death], call[name[T]][name[i]]]]] begin[:]
call[name[times].append, parameter[name[next_purchase_in]]]
variable[next_purchase_in] assign[=] call[name[random].exponential, parameter[]]
variable[times] assign[=] call[call[name[np].array, parameter[name[times]]].cumsum, parameter[]]
call[name[df].iloc][name[i]] assign[=] tuple[[<ast.Subscript object at 0x7da1b1d0d270>, <ast.Call object at 0x7da1b1d0d5a0>, <ast.Subscript object at 0x7da1b1d0d960>, <ast.Name object at 0x7da1b1d0f1f0>, <ast.Name object at 0x7da1b1d0f2b0>, <ast.Compare object at 0x7da1b1d0f280>, <ast.Name object at 0x7da1b1d0de10>]]
return[call[name[df].set_index, parameter[constant[customer_id]]]] | keyword[def] identifier[pareto_nbd_model] ( identifier[T] , identifier[r] , identifier[alpha] , identifier[s] , identifier[beta] , identifier[size] = literal[int] ):
literal[string]
keyword[if] identifier[type] ( identifier[T] ) keyword[in] [ identifier[float] , identifier[int] ]:
identifier[T] = identifier[T] * identifier[np] . identifier[ones] ( identifier[size] )
keyword[else] :
identifier[T] = identifier[np] . identifier[asarray] ( identifier[T] )
identifier[lambda_] = identifier[random] . identifier[gamma] ( identifier[r] , identifier[scale] = literal[int] / identifier[alpha] , identifier[size] = identifier[size] )
identifier[mus] = identifier[random] . identifier[gamma] ( identifier[s] , identifier[scale] = literal[int] / identifier[beta] , identifier[size] = identifier[size] )
identifier[columns] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[np] . identifier[zeros] (( identifier[size] , identifier[len] ( identifier[columns] ))), identifier[columns] = identifier[columns] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[size] ):
identifier[l] = identifier[lambda_] [ identifier[i] ]
identifier[mu] = identifier[mus] [ identifier[i] ]
identifier[time_of_death] = identifier[random] . identifier[exponential] ( identifier[scale] = literal[int] / identifier[mu] )
identifier[times] =[]
identifier[next_purchase_in] = identifier[random] . identifier[exponential] ( identifier[scale] = literal[int] / identifier[l] )
keyword[while] identifier[np] . identifier[sum] ( identifier[times] )+ identifier[next_purchase_in] < identifier[min] ( identifier[time_of_death] , identifier[T] [ identifier[i] ]):
identifier[times] . identifier[append] ( identifier[next_purchase_in] )
identifier[next_purchase_in] = identifier[random] . identifier[exponential] ( identifier[scale] = literal[int] / identifier[l] )
identifier[times] = identifier[np] . identifier[array] ( identifier[times] ). identifier[cumsum] ()
identifier[df] . identifier[iloc] [ identifier[i] ]=(
identifier[np] . identifier[unique] ( identifier[np] . identifier[array] ( identifier[times] ). identifier[astype] ( identifier[int] )). identifier[shape] [ literal[int] ],
identifier[np] . identifier[max] ( identifier[times] keyword[if] identifier[times] . identifier[shape] [ literal[int] ]> literal[int] keyword[else] literal[int] ),
identifier[T] [ identifier[i] ],
identifier[l] ,
identifier[mu] ,
identifier[time_of_death] > identifier[T] [ identifier[i] ],
identifier[i] ,
)
keyword[return] identifier[df] . identifier[set_index] ( literal[string] ) | def pareto_nbd_model(T, r, alpha, s, beta, size=1):
"""
Generate artificial data according to the Pareto/NBD model.
See [2]_ for model details.
Parameters
----------
T: array_like
The length of time observing new customers.
r, alpha, s, beta: float
Parameters in the model. See [1]_
size: int, optional
The number of customers to generate
Returns
-------
:obj: DataFrame
with index as customer_ids and the following columns:
'frequency', 'recency', 'T', 'lambda', 'mu', 'alive', 'customer_id'
References
----------
.. [2]: Fader, Peter S. and Bruce G. S. Hardie (2005), "A Note on Deriving the Pareto/NBD Model
and Related Expressions," <http://brucehardie.com/notes/009/>.
"""
if type(T) in [float, int]:
T = T * np.ones(size) # depends on [control=['if'], data=[]]
else:
T = np.asarray(T)
lambda_ = random.gamma(r, scale=1.0 / alpha, size=size)
mus = random.gamma(s, scale=1.0 / beta, size=size)
columns = ['frequency', 'recency', 'T', 'lambda', 'mu', 'alive', 'customer_id']
df = pd.DataFrame(np.zeros((size, len(columns))), columns=columns)
for i in range(size):
l = lambda_[i]
mu = mus[i]
time_of_death = random.exponential(scale=1.0 / mu)
# hacky until I can find something better
times = []
next_purchase_in = random.exponential(scale=1.0 / l)
while np.sum(times) + next_purchase_in < min(time_of_death, T[i]):
times.append(next_purchase_in)
next_purchase_in = random.exponential(scale=1.0 / l) # depends on [control=['while'], data=[]]
times = np.array(times).cumsum()
df.iloc[i] = (np.unique(np.array(times).astype(int)).shape[0], np.max(times if times.shape[0] > 0 else 0), T[i], l, mu, time_of_death > T[i], i) # depends on [control=['for'], data=['i']]
return df.set_index('customer_id') |
def middle_end(self, index):
"""
Set the index (+1) where MIDDLE ends.
:param int index: the new index for MIDDLE end
"""
if (index < 0) or (index > self.all_length):
raise ValueError(u"The given index is not valid")
self.__middle_end = index | def function[middle_end, parameter[self, index]]:
constant[
Set the index (+1) where MIDDLE ends.
:param int index: the new index for MIDDLE end
]
if <ast.BoolOp object at 0x7da1b18fa0e0> begin[:]
<ast.Raise object at 0x7da1b18f88e0>
name[self].__middle_end assign[=] name[index] | keyword[def] identifier[middle_end] ( identifier[self] , identifier[index] ):
literal[string]
keyword[if] ( identifier[index] < literal[int] ) keyword[or] ( identifier[index] > identifier[self] . identifier[all_length] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[__middle_end] = identifier[index] | def middle_end(self, index):
"""
Set the index (+1) where MIDDLE ends.
:param int index: the new index for MIDDLE end
"""
if index < 0 or index > self.all_length:
raise ValueError(u'The given index is not valid') # depends on [control=['if'], data=[]]
self.__middle_end = index |
def calculate_convolution_output_shapes(operator):
'''
Allowed input/output patterns are
1. [N, C, H, W] ---> [N, C, H', W']
'''
check_input_and_output_numbers(operator, input_count_range=1, output_count_range=1)
params = operator.raw_operator.convolution
input_shape = operator.inputs[0].type.shape
operator.outputs[0].type.shape = [0, 0, 0, 0] # Initialize output shape. It will be modified below.
output_shape = operator.outputs[0].type.shape
# Adjust N-axis
output_shape[0] = input_shape[0]
# Adjust C-axis
output_shape[1] = params.outputChannels
# Set up default and non-default parameters
dilations = [1, 1]
if len(params.dilationFactor) > 0:
dilations = [params.dilationFactor[0], params.dilationFactor[1]]
kernel_shape = [3, 3]
if len(params.kernelSize) > 0:
kernel_shape = params.kernelSize
strides = [1, 1]
if len(params.stride) > 0:
strides = params.stride
specified_output_shape = [0, 0] # Only used with convolution transpose
if params.isDeconvolution and len(params.outputShape) > 0:
specified_output_shape = list(int(i) for i in params.outputShape)
pad_mode = params.WhichOneof('ConvolutionPaddingType')
if pad_mode == 'valid' and len(params.valid.paddingAmounts.borderAmounts) > 0:
pad_amounts = params.valid.paddingAmounts.borderAmounts
pad_heads = [pad_amounts[0].startEdgeSize, pad_amounts[1].startEdgeSize]
pad_tails = [pad_amounts[0].endEdgeSize, pad_amounts[1].endEdgeSize]
else:
# Padding amounts are useless for same padding and valid padding uses [0, 0] by default.
pad_heads = [0, 0]
pad_tails = [0, 0]
# Adjust H- and W-axes
for i in range(2):
if params.isDeconvolution:
output_shape[i + 2] = calculate_convolution_transpose_1D_output_shape(
input_shape[i + 2], kernel_shape[i], dilations[i], strides[i],
pad_mode, pad_heads[i], pad_tails[i], specified_output_shape[i])
else:
output_shape[i + 2] = calculate_convolution_and_pooling_1D_output_shape(
input_shape[i + 2], kernel_shape[i], dilations[i], strides[i],
pad_mode, pad_heads[i], pad_tails[i]) | def function[calculate_convolution_output_shapes, parameter[operator]]:
constant[
Allowed input/output patterns are
1. [N, C, H, W] ---> [N, C, H', W']
]
call[name[check_input_and_output_numbers], parameter[name[operator]]]
variable[params] assign[=] name[operator].raw_operator.convolution
variable[input_shape] assign[=] call[name[operator].inputs][constant[0]].type.shape
call[name[operator].outputs][constant[0]].type.shape assign[=] list[[<ast.Constant object at 0x7da1b1d49b40>, <ast.Constant object at 0x7da1b1d49b70>, <ast.Constant object at 0x7da1b1d4ba00>, <ast.Constant object at 0x7da1b1d4b940>]]
variable[output_shape] assign[=] call[name[operator].outputs][constant[0]].type.shape
call[name[output_shape]][constant[0]] assign[=] call[name[input_shape]][constant[0]]
call[name[output_shape]][constant[1]] assign[=] name[params].outputChannels
variable[dilations] assign[=] list[[<ast.Constant object at 0x7da1b1d48790>, <ast.Constant object at 0x7da1b1d48bb0>]]
if compare[call[name[len], parameter[name[params].dilationFactor]] greater[>] constant[0]] begin[:]
variable[dilations] assign[=] list[[<ast.Subscript object at 0x7da1b1d48e20>, <ast.Subscript object at 0x7da1b1d48fa0>]]
variable[kernel_shape] assign[=] list[[<ast.Constant object at 0x7da1b1d4a710>, <ast.Constant object at 0x7da1b1d4a6e0>]]
if compare[call[name[len], parameter[name[params].kernelSize]] greater[>] constant[0]] begin[:]
variable[kernel_shape] assign[=] name[params].kernelSize
variable[strides] assign[=] list[[<ast.Constant object at 0x7da1b1d49240>, <ast.Constant object at 0x7da1b1d49210>]]
if compare[call[name[len], parameter[name[params].stride]] greater[>] constant[0]] begin[:]
variable[strides] assign[=] name[params].stride
variable[specified_output_shape] assign[=] list[[<ast.Constant object at 0x7da1b1d4a530>, <ast.Constant object at 0x7da1b1d4a140>]]
if <ast.BoolOp object at 0x7da1b1d4a200> begin[:]
variable[specified_output_shape] assign[=] call[name[list], parameter[<ast.GeneratorExp object at 0x7da1b1d4ab00>]]
variable[pad_mode] assign[=] call[name[params].WhichOneof, parameter[constant[ConvolutionPaddingType]]]
if <ast.BoolOp object at 0x7da1b1d4a560> begin[:]
variable[pad_amounts] assign[=] name[params].valid.paddingAmounts.borderAmounts
variable[pad_heads] assign[=] list[[<ast.Attribute object at 0x7da1b1d4ac80>, <ast.Attribute object at 0x7da1b1d4bf10>]]
variable[pad_tails] assign[=] list[[<ast.Attribute object at 0x7da1b1d4a680>, <ast.Attribute object at 0x7da1b1d49660>]]
for taget[name[i]] in starred[call[name[range], parameter[constant[2]]]] begin[:]
if name[params].isDeconvolution begin[:]
call[name[output_shape]][binary_operation[name[i] + constant[2]]] assign[=] call[name[calculate_convolution_transpose_1D_output_shape], parameter[call[name[input_shape]][binary_operation[name[i] + constant[2]]], call[name[kernel_shape]][name[i]], call[name[dilations]][name[i]], call[name[strides]][name[i]], name[pad_mode], call[name[pad_heads]][name[i]], call[name[pad_tails]][name[i]], call[name[specified_output_shape]][name[i]]]] | keyword[def] identifier[calculate_convolution_output_shapes] ( identifier[operator] ):
literal[string]
identifier[check_input_and_output_numbers] ( identifier[operator] , identifier[input_count_range] = literal[int] , identifier[output_count_range] = literal[int] )
identifier[params] = identifier[operator] . identifier[raw_operator] . identifier[convolution]
identifier[input_shape] = identifier[operator] . identifier[inputs] [ literal[int] ]. identifier[type] . identifier[shape]
identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] . identifier[shape] =[ literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[output_shape] = identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] . identifier[shape]
identifier[output_shape] [ literal[int] ]= identifier[input_shape] [ literal[int] ]
identifier[output_shape] [ literal[int] ]= identifier[params] . identifier[outputChannels]
identifier[dilations] =[ literal[int] , literal[int] ]
keyword[if] identifier[len] ( identifier[params] . identifier[dilationFactor] )> literal[int] :
identifier[dilations] =[ identifier[params] . identifier[dilationFactor] [ literal[int] ], identifier[params] . identifier[dilationFactor] [ literal[int] ]]
identifier[kernel_shape] =[ literal[int] , literal[int] ]
keyword[if] identifier[len] ( identifier[params] . identifier[kernelSize] )> literal[int] :
identifier[kernel_shape] = identifier[params] . identifier[kernelSize]
identifier[strides] =[ literal[int] , literal[int] ]
keyword[if] identifier[len] ( identifier[params] . identifier[stride] )> literal[int] :
identifier[strides] = identifier[params] . identifier[stride]
identifier[specified_output_shape] =[ literal[int] , literal[int] ]
keyword[if] identifier[params] . identifier[isDeconvolution] keyword[and] identifier[len] ( identifier[params] . identifier[outputShape] )> literal[int] :
identifier[specified_output_shape] = identifier[list] ( identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[params] . identifier[outputShape] )
identifier[pad_mode] = identifier[params] . identifier[WhichOneof] ( literal[string] )
keyword[if] identifier[pad_mode] == literal[string] keyword[and] identifier[len] ( identifier[params] . identifier[valid] . identifier[paddingAmounts] . identifier[borderAmounts] )> literal[int] :
identifier[pad_amounts] = identifier[params] . identifier[valid] . identifier[paddingAmounts] . identifier[borderAmounts]
identifier[pad_heads] =[ identifier[pad_amounts] [ literal[int] ]. identifier[startEdgeSize] , identifier[pad_amounts] [ literal[int] ]. identifier[startEdgeSize] ]
identifier[pad_tails] =[ identifier[pad_amounts] [ literal[int] ]. identifier[endEdgeSize] , identifier[pad_amounts] [ literal[int] ]. identifier[endEdgeSize] ]
keyword[else] :
identifier[pad_heads] =[ literal[int] , literal[int] ]
identifier[pad_tails] =[ literal[int] , literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ):
keyword[if] identifier[params] . identifier[isDeconvolution] :
identifier[output_shape] [ identifier[i] + literal[int] ]= identifier[calculate_convolution_transpose_1D_output_shape] (
identifier[input_shape] [ identifier[i] + literal[int] ], identifier[kernel_shape] [ identifier[i] ], identifier[dilations] [ identifier[i] ], identifier[strides] [ identifier[i] ],
identifier[pad_mode] , identifier[pad_heads] [ identifier[i] ], identifier[pad_tails] [ identifier[i] ], identifier[specified_output_shape] [ identifier[i] ])
keyword[else] :
identifier[output_shape] [ identifier[i] + literal[int] ]= identifier[calculate_convolution_and_pooling_1D_output_shape] (
identifier[input_shape] [ identifier[i] + literal[int] ], identifier[kernel_shape] [ identifier[i] ], identifier[dilations] [ identifier[i] ], identifier[strides] [ identifier[i] ],
identifier[pad_mode] , identifier[pad_heads] [ identifier[i] ], identifier[pad_tails] [ identifier[i] ]) | def calculate_convolution_output_shapes(operator):
"""
Allowed input/output patterns are
1. [N, C, H, W] ---> [N, C, H', W']
"""
check_input_and_output_numbers(operator, input_count_range=1, output_count_range=1)
params = operator.raw_operator.convolution
input_shape = operator.inputs[0].type.shape
operator.outputs[0].type.shape = [0, 0, 0, 0] # Initialize output shape. It will be modified below.
output_shape = operator.outputs[0].type.shape
# Adjust N-axis
output_shape[0] = input_shape[0]
# Adjust C-axis
output_shape[1] = params.outputChannels
# Set up default and non-default parameters
dilations = [1, 1]
if len(params.dilationFactor) > 0:
dilations = [params.dilationFactor[0], params.dilationFactor[1]] # depends on [control=['if'], data=[]]
kernel_shape = [3, 3]
if len(params.kernelSize) > 0:
kernel_shape = params.kernelSize # depends on [control=['if'], data=[]]
strides = [1, 1]
if len(params.stride) > 0:
strides = params.stride # depends on [control=['if'], data=[]]
specified_output_shape = [0, 0] # Only used with convolution transpose
if params.isDeconvolution and len(params.outputShape) > 0:
specified_output_shape = list((int(i) for i in params.outputShape)) # depends on [control=['if'], data=[]]
pad_mode = params.WhichOneof('ConvolutionPaddingType')
if pad_mode == 'valid' and len(params.valid.paddingAmounts.borderAmounts) > 0:
pad_amounts = params.valid.paddingAmounts.borderAmounts
pad_heads = [pad_amounts[0].startEdgeSize, pad_amounts[1].startEdgeSize]
pad_tails = [pad_amounts[0].endEdgeSize, pad_amounts[1].endEdgeSize] # depends on [control=['if'], data=[]]
else:
# Padding amounts are useless for same padding and valid padding uses [0, 0] by default.
pad_heads = [0, 0]
pad_tails = [0, 0]
# Adjust H- and W-axes
for i in range(2):
if params.isDeconvolution:
output_shape[i + 2] = calculate_convolution_transpose_1D_output_shape(input_shape[i + 2], kernel_shape[i], dilations[i], strides[i], pad_mode, pad_heads[i], pad_tails[i], specified_output_shape[i]) # depends on [control=['if'], data=[]]
else:
output_shape[i + 2] = calculate_convolution_and_pooling_1D_output_shape(input_shape[i + 2], kernel_shape[i], dilations[i], strides[i], pad_mode, pad_heads[i], pad_tails[i]) # depends on [control=['for'], data=['i']] |
def visit_module(self, node):
"""visit an astroid.Module node
* set the locals_type mapping
* set the depends mapping
* optionally tag the node with a unique id
"""
if hasattr(node, "locals_type"):
return
node.locals_type = collections.defaultdict(list)
node.depends = []
if self.tag:
node.uid = self.generate_id() | def function[visit_module, parameter[self, node]]:
constant[visit an astroid.Module node
* set the locals_type mapping
* set the depends mapping
* optionally tag the node with a unique id
]
if call[name[hasattr], parameter[name[node], constant[locals_type]]] begin[:]
return[None]
name[node].locals_type assign[=] call[name[collections].defaultdict, parameter[name[list]]]
name[node].depends assign[=] list[[]]
if name[self].tag begin[:]
name[node].uid assign[=] call[name[self].generate_id, parameter[]] | keyword[def] identifier[visit_module] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[node] , literal[string] ):
keyword[return]
identifier[node] . identifier[locals_type] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
identifier[node] . identifier[depends] =[]
keyword[if] identifier[self] . identifier[tag] :
identifier[node] . identifier[uid] = identifier[self] . identifier[generate_id] () | def visit_module(self, node):
"""visit an astroid.Module node
* set the locals_type mapping
* set the depends mapping
* optionally tag the node with a unique id
"""
if hasattr(node, 'locals_type'):
return # depends on [control=['if'], data=[]]
node.locals_type = collections.defaultdict(list)
node.depends = []
if self.tag:
node.uid = self.generate_id() # depends on [control=['if'], data=[]] |
def is_dark_font_color(color_scheme):
"""Check if the font color used in the color scheme is dark."""
color_scheme = get_color_scheme(color_scheme)
font_color, fon_fw, fon_fs = color_scheme['normal']
return dark_color(font_color) | def function[is_dark_font_color, parameter[color_scheme]]:
constant[Check if the font color used in the color scheme is dark.]
variable[color_scheme] assign[=] call[name[get_color_scheme], parameter[name[color_scheme]]]
<ast.Tuple object at 0x7da18dc06c50> assign[=] call[name[color_scheme]][constant[normal]]
return[call[name[dark_color], parameter[name[font_color]]]] | keyword[def] identifier[is_dark_font_color] ( identifier[color_scheme] ):
literal[string]
identifier[color_scheme] = identifier[get_color_scheme] ( identifier[color_scheme] )
identifier[font_color] , identifier[fon_fw] , identifier[fon_fs] = identifier[color_scheme] [ literal[string] ]
keyword[return] identifier[dark_color] ( identifier[font_color] ) | def is_dark_font_color(color_scheme):
"""Check if the font color used in the color scheme is dark."""
color_scheme = get_color_scheme(color_scheme)
(font_color, fon_fw, fon_fs) = color_scheme['normal']
return dark_color(font_color) |
def write(self, image, options, thumbnail):
"""
Writes the thumbnail image
"""
args = settings.THUMBNAIL_VIPSTHUMBNAIL.split(' ')
args.append(image['source'])
for k in image['options']:
v = image['options'][k]
args.append('--%s' % k)
if v is not None:
args.append('%s' % v)
suffix = '.%s' % EXTENSIONS[options['format']]
write_options = []
if options['format'] == 'JPEG' and options.get(
'progressive', settings.THUMBNAIL_PROGRESSIVE):
write_options.append("interlace")
if options['quality']:
if options['format'] == 'JPEG':
write_options.append("Q=%d" % options['quality'])
with NamedTemporaryFile(suffix=suffix, mode='rb') as fp:
# older vipsthumbails used -o, this was renamed to -f in 8.0, use
# -o here for commpatibility
args.append("-o")
args.append(fp.name + "[%s]" % ",".join(write_options))
args = map(smart_str, args)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
out, err = p.communicate()
if err:
raise Exception(err)
thumbnail.write(fp.read()) | def function[write, parameter[self, image, options, thumbnail]]:
constant[
Writes the thumbnail image
]
variable[args] assign[=] call[name[settings].THUMBNAIL_VIPSTHUMBNAIL.split, parameter[constant[ ]]]
call[name[args].append, parameter[call[name[image]][constant[source]]]]
for taget[name[k]] in starred[call[name[image]][constant[options]]] begin[:]
variable[v] assign[=] call[call[name[image]][constant[options]]][name[k]]
call[name[args].append, parameter[binary_operation[constant[--%s] <ast.Mod object at 0x7da2590d6920> name[k]]]]
if compare[name[v] is_not constant[None]] begin[:]
call[name[args].append, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[v]]]]
variable[suffix] assign[=] binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> call[name[EXTENSIONS]][call[name[options]][constant[format]]]]
variable[write_options] assign[=] list[[]]
if <ast.BoolOp object at 0x7da18f8124d0> begin[:]
call[name[write_options].append, parameter[constant[interlace]]]
if call[name[options]][constant[quality]] begin[:]
if compare[call[name[options]][constant[format]] equal[==] constant[JPEG]] begin[:]
call[name[write_options].append, parameter[binary_operation[constant[Q=%d] <ast.Mod object at 0x7da2590d6920> call[name[options]][constant[quality]]]]]
with call[name[NamedTemporaryFile], parameter[]] begin[:]
call[name[args].append, parameter[constant[-o]]]
call[name[args].append, parameter[binary_operation[name[fp].name + binary_operation[constant[[%s]] <ast.Mod object at 0x7da2590d6920> call[constant[,].join, parameter[name[write_options]]]]]]]
variable[args] assign[=] call[name[map], parameter[name[smart_str], name[args]]]
variable[p] assign[=] call[name[subprocess].Popen, parameter[name[args]]]
call[name[p].wait, parameter[]]
<ast.Tuple object at 0x7da18f812ad0> assign[=] call[name[p].communicate, parameter[]]
if name[err] begin[:]
<ast.Raise object at 0x7da18f811e40>
call[name[thumbnail].write, parameter[call[name[fp].read, parameter[]]]] | keyword[def] identifier[write] ( identifier[self] , identifier[image] , identifier[options] , identifier[thumbnail] ):
literal[string]
identifier[args] = identifier[settings] . identifier[THUMBNAIL_VIPSTHUMBNAIL] . identifier[split] ( literal[string] )
identifier[args] . identifier[append] ( identifier[image] [ literal[string] ])
keyword[for] identifier[k] keyword[in] identifier[image] [ literal[string] ]:
identifier[v] = identifier[image] [ literal[string] ][ identifier[k] ]
identifier[args] . identifier[append] ( literal[string] % identifier[k] )
keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] :
identifier[args] . identifier[append] ( literal[string] % identifier[v] )
identifier[suffix] = literal[string] % identifier[EXTENSIONS] [ identifier[options] [ literal[string] ]]
identifier[write_options] =[]
keyword[if] identifier[options] [ literal[string] ]== literal[string] keyword[and] identifier[options] . identifier[get] (
literal[string] , identifier[settings] . identifier[THUMBNAIL_PROGRESSIVE] ):
identifier[write_options] . identifier[append] ( literal[string] )
keyword[if] identifier[options] [ literal[string] ]:
keyword[if] identifier[options] [ literal[string] ]== literal[string] :
identifier[write_options] . identifier[append] ( literal[string] % identifier[options] [ literal[string] ])
keyword[with] identifier[NamedTemporaryFile] ( identifier[suffix] = identifier[suffix] , identifier[mode] = literal[string] ) keyword[as] identifier[fp] :
identifier[args] . identifier[append] ( literal[string] )
identifier[args] . identifier[append] ( identifier[fp] . identifier[name] + literal[string] % literal[string] . identifier[join] ( identifier[write_options] ))
identifier[args] = identifier[map] ( identifier[smart_str] , identifier[args] )
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[args] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[p] . identifier[wait] ()
identifier[out] , identifier[err] = identifier[p] . identifier[communicate] ()
keyword[if] identifier[err] :
keyword[raise] identifier[Exception] ( identifier[err] )
identifier[thumbnail] . identifier[write] ( identifier[fp] . identifier[read] ()) | def write(self, image, options, thumbnail):
"""
Writes the thumbnail image
"""
args = settings.THUMBNAIL_VIPSTHUMBNAIL.split(' ')
args.append(image['source'])
for k in image['options']:
v = image['options'][k]
args.append('--%s' % k)
if v is not None:
args.append('%s' % v) # depends on [control=['if'], data=['v']] # depends on [control=['for'], data=['k']]
suffix = '.%s' % EXTENSIONS[options['format']]
write_options = []
if options['format'] == 'JPEG' and options.get('progressive', settings.THUMBNAIL_PROGRESSIVE):
write_options.append('interlace') # depends on [control=['if'], data=[]]
if options['quality']:
if options['format'] == 'JPEG':
write_options.append('Q=%d' % options['quality']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
with NamedTemporaryFile(suffix=suffix, mode='rb') as fp:
# older vipsthumbails used -o, this was renamed to -f in 8.0, use
# -o here for commpatibility
args.append('-o')
args.append(fp.name + '[%s]' % ','.join(write_options))
args = map(smart_str, args)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
(out, err) = p.communicate()
if err:
raise Exception(err) # depends on [control=['if'], data=[]]
thumbnail.write(fp.read()) # depends on [control=['with'], data=['fp']] |
def _BuildIndex(self):
"""Recreate the key index."""
self._index = {}
for i, k in enumerate(self._keys):
self._index[k] = i | def function[_BuildIndex, parameter[self]]:
constant[Recreate the key index.]
name[self]._index assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1ff4880>, <ast.Name object at 0x7da1b1ff4fd0>]]] in starred[call[name[enumerate], parameter[name[self]._keys]]] begin[:]
call[name[self]._index][name[k]] assign[=] name[i] | keyword[def] identifier[_BuildIndex] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_index] ={}
keyword[for] identifier[i] , identifier[k] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_keys] ):
identifier[self] . identifier[_index] [ identifier[k] ]= identifier[i] | def _BuildIndex(self):
"""Recreate the key index."""
self._index = {}
for (i, k) in enumerate(self._keys):
self._index[k] = i # depends on [control=['for'], data=[]] |
def delete(self):
"""Delete a file instance.
The file instance can be deleted if it has no references from other
objects. The caller is responsible to test if the file instance is
writable and that the disk file can actually be removed.
.. note::
Normally you should use the Celery task to delete a file instance,
as this method will not remove the file on disk.
"""
self.query.filter_by(id=self.id).delete()
return self | def function[delete, parameter[self]]:
constant[Delete a file instance.
The file instance can be deleted if it has no references from other
objects. The caller is responsible to test if the file instance is
writable and that the disk file can actually be removed.
.. note::
Normally you should use the Celery task to delete a file instance,
as this method will not remove the file on disk.
]
call[call[name[self].query.filter_by, parameter[]].delete, parameter[]]
return[name[self]] | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
identifier[self] . identifier[query] . identifier[filter_by] ( identifier[id] = identifier[self] . identifier[id] ). identifier[delete] ()
keyword[return] identifier[self] | def delete(self):
"""Delete a file instance.
The file instance can be deleted if it has no references from other
objects. The caller is responsible to test if the file instance is
writable and that the disk file can actually be removed.
.. note::
Normally you should use the Celery task to delete a file instance,
as this method will not remove the file on disk.
"""
self.query.filter_by(id=self.id).delete()
return self |
def render(self):
'''
.. versionchanged:: 0.12
Add ``dynamic_electrode_state_shapes`` layer to show dynamic
electrode actuations.
'''
# Render each layer and update data frame with new content for each
# surface.
surface_names = ('background', 'shapes', 'connections', 'routes',
'channel_labels', 'static_electrode_state_shapes',
'dynamic_electrode_state_shapes', 'registration')
for k in surface_names:
self.set_surface(k, getattr(self, 'render_' + k)())
self.emit('surfaces-reset', self.df_surfaces)
self.cairo_surface = flatten_surfaces(self.df_surfaces) | def function[render, parameter[self]]:
constant[
.. versionchanged:: 0.12
Add ``dynamic_electrode_state_shapes`` layer to show dynamic
electrode actuations.
]
variable[surface_names] assign[=] tuple[[<ast.Constant object at 0x7da1b277c280>, <ast.Constant object at 0x7da1b277c850>, <ast.Constant object at 0x7da1b277d390>, <ast.Constant object at 0x7da1b2774f40>, <ast.Constant object at 0x7da1b2775f60>, <ast.Constant object at 0x7da1b27a5540>, <ast.Constant object at 0x7da1b27a54e0>, <ast.Constant object at 0x7da1b27a6b60>]]
for taget[name[k]] in starred[name[surface_names]] begin[:]
call[name[self].set_surface, parameter[name[k], call[call[name[getattr], parameter[name[self], binary_operation[constant[render_] + name[k]]]], parameter[]]]]
call[name[self].emit, parameter[constant[surfaces-reset], name[self].df_surfaces]]
name[self].cairo_surface assign[=] call[name[flatten_surfaces], parameter[name[self].df_surfaces]] | keyword[def] identifier[render] ( identifier[self] ):
literal[string]
identifier[surface_names] =( literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] )
keyword[for] identifier[k] keyword[in] identifier[surface_names] :
identifier[self] . identifier[set_surface] ( identifier[k] , identifier[getattr] ( identifier[self] , literal[string] + identifier[k] )())
identifier[self] . identifier[emit] ( literal[string] , identifier[self] . identifier[df_surfaces] )
identifier[self] . identifier[cairo_surface] = identifier[flatten_surfaces] ( identifier[self] . identifier[df_surfaces] ) | def render(self):
"""
.. versionchanged:: 0.12
Add ``dynamic_electrode_state_shapes`` layer to show dynamic
electrode actuations.
"""
# Render each layer and update data frame with new content for each
# surface.
surface_names = ('background', 'shapes', 'connections', 'routes', 'channel_labels', 'static_electrode_state_shapes', 'dynamic_electrode_state_shapes', 'registration')
for k in surface_names:
self.set_surface(k, getattr(self, 'render_' + k)()) # depends on [control=['for'], data=['k']]
self.emit('surfaces-reset', self.df_surfaces)
self.cairo_surface = flatten_surfaces(self.df_surfaces) |
def keys(self, args):
"""
keys wrapper that queries every shard. This is an expensive
operation.
This method should be invoked on a TwemRedis instance as if it
were being invoked directly on a StrictRedis instance.
"""
results = {}
# TODO: parallelize
for shard_num in range(0, self.num_shards()):
shard = self.get_shard_by_num(shard_num)
results[shard_num] = shard.keys(args)
return results | def function[keys, parameter[self, args]]:
constant[
keys wrapper that queries every shard. This is an expensive
operation.
This method should be invoked on a TwemRedis instance as if it
were being invoked directly on a StrictRedis instance.
]
variable[results] assign[=] dictionary[[], []]
for taget[name[shard_num]] in starred[call[name[range], parameter[constant[0], call[name[self].num_shards, parameter[]]]]] begin[:]
variable[shard] assign[=] call[name[self].get_shard_by_num, parameter[name[shard_num]]]
call[name[results]][name[shard_num]] assign[=] call[name[shard].keys, parameter[name[args]]]
return[name[results]] | keyword[def] identifier[keys] ( identifier[self] , identifier[args] ):
literal[string]
identifier[results] ={}
keyword[for] identifier[shard_num] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[num_shards] ()):
identifier[shard] = identifier[self] . identifier[get_shard_by_num] ( identifier[shard_num] )
identifier[results] [ identifier[shard_num] ]= identifier[shard] . identifier[keys] ( identifier[args] )
keyword[return] identifier[results] | def keys(self, args):
"""
keys wrapper that queries every shard. This is an expensive
operation.
This method should be invoked on a TwemRedis instance as if it
were being invoked directly on a StrictRedis instance.
"""
results = {}
# TODO: parallelize
for shard_num in range(0, self.num_shards()):
shard = self.get_shard_by_num(shard_num)
results[shard_num] = shard.keys(args) # depends on [control=['for'], data=['shard_num']]
return results |
def check(self, query):
"""
:param query:
"""
if query.get_type() in {Keyword.LIST, Keyword.DROP}:
series = query.series_stmt
else:
series = query.from_stmt
if len(series) >= self.min_series_name_length:
return Ok(True)
return Err("Series name too short. Please be more precise.") | def function[check, parameter[self, query]]:
constant[
:param query:
]
if compare[call[name[query].get_type, parameter[]] in <ast.Set object at 0x7da1aff1dfc0>] begin[:]
variable[series] assign[=] name[query].series_stmt
if compare[call[name[len], parameter[name[series]]] greater_or_equal[>=] name[self].min_series_name_length] begin[:]
return[call[name[Ok], parameter[constant[True]]]]
return[call[name[Err], parameter[constant[Series name too short. Please be more precise.]]]] | keyword[def] identifier[check] ( identifier[self] , identifier[query] ):
literal[string]
keyword[if] identifier[query] . identifier[get_type] () keyword[in] { identifier[Keyword] . identifier[LIST] , identifier[Keyword] . identifier[DROP] }:
identifier[series] = identifier[query] . identifier[series_stmt]
keyword[else] :
identifier[series] = identifier[query] . identifier[from_stmt]
keyword[if] identifier[len] ( identifier[series] )>= identifier[self] . identifier[min_series_name_length] :
keyword[return] identifier[Ok] ( keyword[True] )
keyword[return] identifier[Err] ( literal[string] ) | def check(self, query):
"""
:param query:
"""
if query.get_type() in {Keyword.LIST, Keyword.DROP}:
series = query.series_stmt # depends on [control=['if'], data=[]]
else:
series = query.from_stmt
if len(series) >= self.min_series_name_length:
return Ok(True) # depends on [control=['if'], data=[]]
return Err('Series name too short. Please be more precise.') |
def adjacent(self,rng2):
""" Test for adjacency.
:param rng2:
:param use_direction: false by default
:param type: GenomicRange
:param type: use_direction
"""
if self.chr != rng2.chr: return False
if self.direction != rng2.direction and use_direction: return False
if self.end == rng2.start-1: return True
if self.start-1 == rng2.end: return True
return False | def function[adjacent, parameter[self, rng2]]:
constant[ Test for adjacency.
:param rng2:
:param use_direction: false by default
:param type: GenomicRange
:param type: use_direction
]
if compare[name[self].chr not_equal[!=] name[rng2].chr] begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da20e955990> begin[:]
return[constant[False]]
if compare[name[self].end equal[==] binary_operation[name[rng2].start - constant[1]]] begin[:]
return[constant[True]]
if compare[binary_operation[name[self].start - constant[1]] equal[==] name[rng2].end] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[adjacent] ( identifier[self] , identifier[rng2] ):
literal[string]
keyword[if] identifier[self] . identifier[chr] != identifier[rng2] . identifier[chr] : keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[direction] != identifier[rng2] . identifier[direction] keyword[and] identifier[use_direction] : keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[end] == identifier[rng2] . identifier[start] - literal[int] : keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[start] - literal[int] == identifier[rng2] . identifier[end] : keyword[return] keyword[True]
keyword[return] keyword[False] | def adjacent(self, rng2):
""" Test for adjacency.
:param rng2:
:param use_direction: false by default
:param type: GenomicRange
:param type: use_direction
"""
if self.chr != rng2.chr:
return False # depends on [control=['if'], data=[]]
if self.direction != rng2.direction and use_direction:
return False # depends on [control=['if'], data=[]]
if self.end == rng2.start - 1:
return True # depends on [control=['if'], data=[]]
if self.start - 1 == rng2.end:
return True # depends on [control=['if'], data=[]]
return False |
def text(self):
"""Parses problems.txt and returns problem text"""
def _problem_iter(problem_num):
problem_file = os.path.join(EULER_DATA, 'problems.txt')
with open(problem_file) as f:
is_problem = False
last_line = ''
for line in f:
if line.strip() == 'Problem %i' % problem_num:
is_problem = True
if is_problem:
if line == last_line == '\n':
break
else:
yield line[:-1]
last_line = line
problem_lines = [line for line in _problem_iter(self.num)]
if problem_lines:
# First three lines are the problem number, the divider line,
# and a newline, so don't include them in the returned string.
# Also, strip the final newline.
return '\n'.join(problem_lines[3:-1])
else:
msg = 'Problem %i not found in problems.txt.' % self.num
click.secho(msg, fg='red')
click.echo('If this problem exists on Project Euler, consider '
'submitting a pull request to EulerPy on GitHub.')
sys.exit(1) | def function[text, parameter[self]]:
constant[Parses problems.txt and returns problem text]
def function[_problem_iter, parameter[problem_num]]:
variable[problem_file] assign[=] call[name[os].path.join, parameter[name[EULER_DATA], constant[problems.txt]]]
with call[name[open], parameter[name[problem_file]]] begin[:]
variable[is_problem] assign[=] constant[False]
variable[last_line] assign[=] constant[]
for taget[name[line]] in starred[name[f]] begin[:]
if compare[call[name[line].strip, parameter[]] equal[==] binary_operation[constant[Problem %i] <ast.Mod object at 0x7da2590d6920> name[problem_num]]] begin[:]
variable[is_problem] assign[=] constant[True]
if name[is_problem] begin[:]
if compare[name[line] equal[==] name[last_line]] begin[:]
break
variable[problem_lines] assign[=] <ast.ListComp object at 0x7da1b07af550>
if name[problem_lines] begin[:]
return[call[constant[
].join, parameter[call[name[problem_lines]][<ast.Slice object at 0x7da1b07ae890>]]]] | keyword[def] identifier[text] ( identifier[self] ):
literal[string]
keyword[def] identifier[_problem_iter] ( identifier[problem_num] ):
identifier[problem_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[EULER_DATA] , literal[string] )
keyword[with] identifier[open] ( identifier[problem_file] ) keyword[as] identifier[f] :
identifier[is_problem] = keyword[False]
identifier[last_line] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[f] :
keyword[if] identifier[line] . identifier[strip] ()== literal[string] % identifier[problem_num] :
identifier[is_problem] = keyword[True]
keyword[if] identifier[is_problem] :
keyword[if] identifier[line] == identifier[last_line] == literal[string] :
keyword[break]
keyword[else] :
keyword[yield] identifier[line] [:- literal[int] ]
identifier[last_line] = identifier[line]
identifier[problem_lines] =[ identifier[line] keyword[for] identifier[line] keyword[in] identifier[_problem_iter] ( identifier[self] . identifier[num] )]
keyword[if] identifier[problem_lines] :
keyword[return] literal[string] . identifier[join] ( identifier[problem_lines] [ literal[int] :- literal[int] ])
keyword[else] :
identifier[msg] = literal[string] % identifier[self] . identifier[num]
identifier[click] . identifier[secho] ( identifier[msg] , identifier[fg] = literal[string] )
identifier[click] . identifier[echo] ( literal[string]
literal[string] )
identifier[sys] . identifier[exit] ( literal[int] ) | def text(self):
"""Parses problems.txt and returns problem text"""
def _problem_iter(problem_num):
problem_file = os.path.join(EULER_DATA, 'problems.txt')
with open(problem_file) as f:
is_problem = False
last_line = ''
for line in f:
if line.strip() == 'Problem %i' % problem_num:
is_problem = True # depends on [control=['if'], data=[]]
if is_problem:
if line == last_line == '\n':
break # depends on [control=['if'], data=[]]
else:
yield line[:-1]
last_line = line # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
problem_lines = [line for line in _problem_iter(self.num)]
if problem_lines:
# First three lines are the problem number, the divider line,
# and a newline, so don't include them in the returned string.
# Also, strip the final newline.
return '\n'.join(problem_lines[3:-1]) # depends on [control=['if'], data=[]]
else:
msg = 'Problem %i not found in problems.txt.' % self.num
click.secho(msg, fg='red')
click.echo('If this problem exists on Project Euler, consider submitting a pull request to EulerPy on GitHub.')
sys.exit(1) |
def main(api_endpoint, credentials,
device_model_id, device_id, lang, verbose,
input_audio_file, output_audio_file,
block_size, grpc_deadline, *args, **kwargs):
"""File based sample for the Google Assistant API.
Examples:
$ python -m audiofileinput -i <input file> -o <output file>
"""
# Setup logging.
logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
# Load OAuth 2.0 credentials.
try:
with open(credentials, 'r') as f:
credentials = google.oauth2.credentials.Credentials(token=None,
**json.load(f))
http_request = google.auth.transport.requests.Request()
credentials.refresh(http_request)
except Exception as e:
logging.error('Error loading credentials: %s', e)
logging.error('Run google-oauthlib-tool to initialize '
'new OAuth 2.0 credentials.')
sys.exit(-1)
# Create an authorized gRPC channel.
grpc_channel = google.auth.transport.grpc.secure_authorized_channel(
credentials, http_request, api_endpoint)
logging.info('Connecting to %s', api_endpoint)
# Create gRPC stubs
assistant = embedded_assistant_pb2_grpc.EmbeddedAssistantStub(grpc_channel)
# Generate gRPC requests.
def gen_assist_requests(input_stream):
dialog_state_in = embedded_assistant_pb2.DialogStateIn(
language_code=lang,
conversation_state=b''
)
config = embedded_assistant_pb2.AssistConfig(
audio_in_config=embedded_assistant_pb2.AudioInConfig(
encoding='LINEAR16',
sample_rate_hertz=16000,
),
audio_out_config=embedded_assistant_pb2.AudioOutConfig(
encoding='LINEAR16',
sample_rate_hertz=16000,
volume_percentage=100,
),
dialog_state_in=dialog_state_in,
device_config=embedded_assistant_pb2.DeviceConfig(
device_id=device_id,
device_model_id=device_model_id,
)
)
# Send first AssistRequest message with configuration.
yield embedded_assistant_pb2.AssistRequest(config=config)
while True:
# Read user request from file.
data = input_stream.read(block_size)
if not data:
break
# Send following AssitRequest message with audio chunks.
yield embedded_assistant_pb2.AssistRequest(audio_in=data)
for resp in assistant.Assist(gen_assist_requests(input_audio_file),
grpc_deadline):
# Iterate on AssistResponse messages.
if resp.event_type == END_OF_UTTERANCE:
logging.info('End of audio request detected')
if resp.speech_results:
logging.info('Transcript of user request: "%s".',
' '.join(r.transcript
for r in resp.speech_results))
if len(resp.audio_out.audio_data) > 0:
# Write assistant response to supplied file.
output_audio_file.write(resp.audio_out.audio_data)
if resp.dialog_state_out.supplemental_display_text:
logging.info('Assistant display text: "%s"',
resp.dialog_state_out.supplemental_display_text)
if resp.device_action.device_request_json:
device_request = json.loads(resp.device_action.device_request_json)
logging.info('Device request: %s', device_request) | def function[main, parameter[api_endpoint, credentials, device_model_id, device_id, lang, verbose, input_audio_file, output_audio_file, block_size, grpc_deadline]]:
constant[File based sample for the Google Assistant API.
Examples:
$ python -m audiofileinput -i <input file> -o <output file>
]
call[name[logging].basicConfig, parameter[]]
<ast.Try object at 0x7da18f7232b0>
variable[grpc_channel] assign[=] call[name[google].auth.transport.grpc.secure_authorized_channel, parameter[name[credentials], name[http_request], name[api_endpoint]]]
call[name[logging].info, parameter[constant[Connecting to %s], name[api_endpoint]]]
variable[assistant] assign[=] call[name[embedded_assistant_pb2_grpc].EmbeddedAssistantStub, parameter[name[grpc_channel]]]
def function[gen_assist_requests, parameter[input_stream]]:
variable[dialog_state_in] assign[=] call[name[embedded_assistant_pb2].DialogStateIn, parameter[]]
variable[config] assign[=] call[name[embedded_assistant_pb2].AssistConfig, parameter[]]
<ast.Yield object at 0x7da18f720370>
while constant[True] begin[:]
variable[data] assign[=] call[name[input_stream].read, parameter[name[block_size]]]
if <ast.UnaryOp object at 0x7da18f723cd0> begin[:]
break
<ast.Yield object at 0x7da18f722980>
for taget[name[resp]] in starred[call[name[assistant].Assist, parameter[call[name[gen_assist_requests], parameter[name[input_audio_file]]], name[grpc_deadline]]]] begin[:]
if compare[name[resp].event_type equal[==] name[END_OF_UTTERANCE]] begin[:]
call[name[logging].info, parameter[constant[End of audio request detected]]]
if name[resp].speech_results begin[:]
call[name[logging].info, parameter[constant[Transcript of user request: "%s".], call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da2041da170>]]]]
if compare[call[name[len], parameter[name[resp].audio_out.audio_data]] greater[>] constant[0]] begin[:]
call[name[output_audio_file].write, parameter[name[resp].audio_out.audio_data]]
if name[resp].dialog_state_out.supplemental_display_text begin[:]
call[name[logging].info, parameter[constant[Assistant display text: "%s"], name[resp].dialog_state_out.supplemental_display_text]]
if name[resp].device_action.device_request_json begin[:]
variable[device_request] assign[=] call[name[json].loads, parameter[name[resp].device_action.device_request_json]]
call[name[logging].info, parameter[constant[Device request: %s], name[device_request]]] | keyword[def] identifier[main] ( identifier[api_endpoint] , identifier[credentials] ,
identifier[device_model_id] , identifier[device_id] , identifier[lang] , identifier[verbose] ,
identifier[input_audio_file] , identifier[output_audio_file] ,
identifier[block_size] , identifier[grpc_deadline] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[DEBUG] keyword[if] identifier[verbose] keyword[else] identifier[logging] . identifier[INFO] )
keyword[try] :
keyword[with] identifier[open] ( identifier[credentials] , literal[string] ) keyword[as] identifier[f] :
identifier[credentials] = identifier[google] . identifier[oauth2] . identifier[credentials] . identifier[Credentials] ( identifier[token] = keyword[None] ,
** identifier[json] . identifier[load] ( identifier[f] ))
identifier[http_request] = identifier[google] . identifier[auth] . identifier[transport] . identifier[requests] . identifier[Request] ()
identifier[credentials] . identifier[refresh] ( identifier[http_request] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logging] . identifier[error] ( literal[string] , identifier[e] )
identifier[logging] . identifier[error] ( literal[string]
literal[string] )
identifier[sys] . identifier[exit] (- literal[int] )
identifier[grpc_channel] = identifier[google] . identifier[auth] . identifier[transport] . identifier[grpc] . identifier[secure_authorized_channel] (
identifier[credentials] , identifier[http_request] , identifier[api_endpoint] )
identifier[logging] . identifier[info] ( literal[string] , identifier[api_endpoint] )
identifier[assistant] = identifier[embedded_assistant_pb2_grpc] . identifier[EmbeddedAssistantStub] ( identifier[grpc_channel] )
keyword[def] identifier[gen_assist_requests] ( identifier[input_stream] ):
identifier[dialog_state_in] = identifier[embedded_assistant_pb2] . identifier[DialogStateIn] (
identifier[language_code] = identifier[lang] ,
identifier[conversation_state] = literal[string]
)
identifier[config] = identifier[embedded_assistant_pb2] . identifier[AssistConfig] (
identifier[audio_in_config] = identifier[embedded_assistant_pb2] . identifier[AudioInConfig] (
identifier[encoding] = literal[string] ,
identifier[sample_rate_hertz] = literal[int] ,
),
identifier[audio_out_config] = identifier[embedded_assistant_pb2] . identifier[AudioOutConfig] (
identifier[encoding] = literal[string] ,
identifier[sample_rate_hertz] = literal[int] ,
identifier[volume_percentage] = literal[int] ,
),
identifier[dialog_state_in] = identifier[dialog_state_in] ,
identifier[device_config] = identifier[embedded_assistant_pb2] . identifier[DeviceConfig] (
identifier[device_id] = identifier[device_id] ,
identifier[device_model_id] = identifier[device_model_id] ,
)
)
keyword[yield] identifier[embedded_assistant_pb2] . identifier[AssistRequest] ( identifier[config] = identifier[config] )
keyword[while] keyword[True] :
identifier[data] = identifier[input_stream] . identifier[read] ( identifier[block_size] )
keyword[if] keyword[not] identifier[data] :
keyword[break]
keyword[yield] identifier[embedded_assistant_pb2] . identifier[AssistRequest] ( identifier[audio_in] = identifier[data] )
keyword[for] identifier[resp] keyword[in] identifier[assistant] . identifier[Assist] ( identifier[gen_assist_requests] ( identifier[input_audio_file] ),
identifier[grpc_deadline] ):
keyword[if] identifier[resp] . identifier[event_type] == identifier[END_OF_UTTERANCE] :
identifier[logging] . identifier[info] ( literal[string] )
keyword[if] identifier[resp] . identifier[speech_results] :
identifier[logging] . identifier[info] ( literal[string] ,
literal[string] . identifier[join] ( identifier[r] . identifier[transcript]
keyword[for] identifier[r] keyword[in] identifier[resp] . identifier[speech_results] ))
keyword[if] identifier[len] ( identifier[resp] . identifier[audio_out] . identifier[audio_data] )> literal[int] :
identifier[output_audio_file] . identifier[write] ( identifier[resp] . identifier[audio_out] . identifier[audio_data] )
keyword[if] identifier[resp] . identifier[dialog_state_out] . identifier[supplemental_display_text] :
identifier[logging] . identifier[info] ( literal[string] ,
identifier[resp] . identifier[dialog_state_out] . identifier[supplemental_display_text] )
keyword[if] identifier[resp] . identifier[device_action] . identifier[device_request_json] :
identifier[device_request] = identifier[json] . identifier[loads] ( identifier[resp] . identifier[device_action] . identifier[device_request_json] )
identifier[logging] . identifier[info] ( literal[string] , identifier[device_request] ) | def main(api_endpoint, credentials, device_model_id, device_id, lang, verbose, input_audio_file, output_audio_file, block_size, grpc_deadline, *args, **kwargs):
"""File based sample for the Google Assistant API.
Examples:
$ python -m audiofileinput -i <input file> -o <output file>
"""
# Setup logging.
logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
# Load OAuth 2.0 credentials.
try:
with open(credentials, 'r') as f:
credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f))
http_request = google.auth.transport.requests.Request()
credentials.refresh(http_request) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except Exception as e:
logging.error('Error loading credentials: %s', e)
logging.error('Run google-oauthlib-tool to initialize new OAuth 2.0 credentials.')
sys.exit(-1) # depends on [control=['except'], data=['e']]
# Create an authorized gRPC channel.
grpc_channel = google.auth.transport.grpc.secure_authorized_channel(credentials, http_request, api_endpoint)
logging.info('Connecting to %s', api_endpoint)
# Create gRPC stubs
assistant = embedded_assistant_pb2_grpc.EmbeddedAssistantStub(grpc_channel)
# Generate gRPC requests.
def gen_assist_requests(input_stream):
dialog_state_in = embedded_assistant_pb2.DialogStateIn(language_code=lang, conversation_state=b'')
config = embedded_assistant_pb2.AssistConfig(audio_in_config=embedded_assistant_pb2.AudioInConfig(encoding='LINEAR16', sample_rate_hertz=16000), audio_out_config=embedded_assistant_pb2.AudioOutConfig(encoding='LINEAR16', sample_rate_hertz=16000, volume_percentage=100), dialog_state_in=dialog_state_in, device_config=embedded_assistant_pb2.DeviceConfig(device_id=device_id, device_model_id=device_model_id))
# Send first AssistRequest message with configuration.
yield embedded_assistant_pb2.AssistRequest(config=config)
while True:
# Read user request from file.
data = input_stream.read(block_size)
if not data:
break # depends on [control=['if'], data=[]]
# Send following AssitRequest message with audio chunks.
yield embedded_assistant_pb2.AssistRequest(audio_in=data) # depends on [control=['while'], data=[]]
for resp in assistant.Assist(gen_assist_requests(input_audio_file), grpc_deadline):
# Iterate on AssistResponse messages.
if resp.event_type == END_OF_UTTERANCE:
logging.info('End of audio request detected') # depends on [control=['if'], data=[]]
if resp.speech_results:
logging.info('Transcript of user request: "%s".', ' '.join((r.transcript for r in resp.speech_results))) # depends on [control=['if'], data=[]]
if len(resp.audio_out.audio_data) > 0:
# Write assistant response to supplied file.
output_audio_file.write(resp.audio_out.audio_data) # depends on [control=['if'], data=[]]
if resp.dialog_state_out.supplemental_display_text:
logging.info('Assistant display text: "%s"', resp.dialog_state_out.supplemental_display_text) # depends on [control=['if'], data=[]]
if resp.device_action.device_request_json:
device_request = json.loads(resp.device_action.device_request_json)
logging.info('Device request: %s', device_request) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['resp']] |
def snap_mismatched_borders(script, edge_dist_ratio=0.01, unify_vert=True):
""" Try to snap together adjacent borders that are slightly mismatched.
This situation can happen on badly triangulated adjacent patches defined by
high order surfaces. For each border vertex the filter snaps it onto the
closest boundary edge only if it is closest of edge_legth*threshold. When
vertex is snapped the corresponding face it split and a new vertex is
created.
Args:
script: the FilterScript object or script filename to write
the filter to.
edge_dist_ratio (float): Collapse edge when the edge / distance ratio
is greater than this value. E.g. for default value 1000 two
straight border edges are collapsed if the central vertex dist from
the straight line composed by the two edges less than a 1/1000 of
the sum of the edges length. Larger values enforce that only
vertexes very close to the line are removed.
unify_vert (bool): If true the snap vertices are welded together.
Layer stack:
No impacts
MeshLab versions:
2016.12
1.3.4BETA
"""
filter_xml = ''.join([
' <filter name="Snap Mismatched Borders">\n',
' <Param name="EdgeDistRatio" ',
'value="{}" '.format(edge_dist_ratio),
'description="Edge Distance Ratio" ',
'type="RichFloat" ',
'/>\n',
' <Param name="UnifyVertices" ',
'value="{}" '.format(str(unify_vert).lower()),
'description="UnifyVertices" ',
'type="RichBool" ',
'/>\n',
' </filter>\n'])
util.write_filter(script, filter_xml)
return None | def function[snap_mismatched_borders, parameter[script, edge_dist_ratio, unify_vert]]:
constant[ Try to snap together adjacent borders that are slightly mismatched.
This situation can happen on badly triangulated adjacent patches defined by
high order surfaces. For each border vertex the filter snaps it onto the
closest boundary edge only if it is closest of edge_legth*threshold. When
vertex is snapped the corresponding face it split and a new vertex is
created.
Args:
script: the FilterScript object or script filename to write
the filter to.
edge_dist_ratio (float): Collapse edge when the edge / distance ratio
is greater than this value. E.g. for default value 1000 two
straight border edges are collapsed if the central vertex dist from
the straight line composed by the two edges less than a 1/1000 of
the sum of the edges length. Larger values enforce that only
vertexes very close to the line are removed.
unify_vert (bool): If true the snap vertices are welded together.
Layer stack:
No impacts
MeshLab versions:
2016.12
1.3.4BETA
]
variable[filter_xml] assign[=] call[constant[].join, parameter[list[[<ast.Constant object at 0x7da18ede4460>, <ast.Constant object at 0x7da18ede5db0>, <ast.Call object at 0x7da18ede6b90>, <ast.Constant object at 0x7da18ede7d00>, <ast.Constant object at 0x7da18ede6f20>, <ast.Constant object at 0x7da18ede5a50>, <ast.Constant object at 0x7da18ede71f0>, <ast.Call object at 0x7da18ede7a90>, <ast.Constant object at 0x7da18ede5330>, <ast.Constant object at 0x7da18ede4bb0>, <ast.Constant object at 0x7da18ede7280>, <ast.Constant object at 0x7da18ede5c30>]]]]
call[name[util].write_filter, parameter[name[script], name[filter_xml]]]
return[constant[None]] | keyword[def] identifier[snap_mismatched_borders] ( identifier[script] , identifier[edge_dist_ratio] = literal[int] , identifier[unify_vert] = keyword[True] ):
literal[string]
identifier[filter_xml] = literal[string] . identifier[join] ([
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[edge_dist_ratio] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[str] ( identifier[unify_vert] ). identifier[lower] ()),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ])
identifier[util] . identifier[write_filter] ( identifier[script] , identifier[filter_xml] )
keyword[return] keyword[None] | def snap_mismatched_borders(script, edge_dist_ratio=0.01, unify_vert=True):
""" Try to snap together adjacent borders that are slightly mismatched.
This situation can happen on badly triangulated adjacent patches defined by
high order surfaces. For each border vertex the filter snaps it onto the
closest boundary edge only if it is closest of edge_legth*threshold. When
vertex is snapped the corresponding face it split and a new vertex is
created.
Args:
script: the FilterScript object or script filename to write
the filter to.
edge_dist_ratio (float): Collapse edge when the edge / distance ratio
is greater than this value. E.g. for default value 1000 two
straight border edges are collapsed if the central vertex dist from
the straight line composed by the two edges less than a 1/1000 of
the sum of the edges length. Larger values enforce that only
vertexes very close to the line are removed.
unify_vert (bool): If true the snap vertices are welded together.
Layer stack:
No impacts
MeshLab versions:
2016.12
1.3.4BETA
"""
filter_xml = ''.join([' <filter name="Snap Mismatched Borders">\n', ' <Param name="EdgeDistRatio" ', 'value="{}" '.format(edge_dist_ratio), 'description="Edge Distance Ratio" ', 'type="RichFloat" ', '/>\n', ' <Param name="UnifyVertices" ', 'value="{}" '.format(str(unify_vert).lower()), 'description="UnifyVertices" ', 'type="RichBool" ', '/>\n', ' </filter>\n'])
util.write_filter(script, filter_xml)
return None |
def dictionary_validator(key_type, value_type):
"""Validator for ``attrs`` that performs deep type checking of dictionaries."""
def _validate_dictionary(instance, attribute, value):
# pylint: disable=unused-argument
"""Validate that a dictionary is structured as expected.
:raises TypeError: if ``value`` is not a dictionary
:raises TypeError: if ``value`` keys are not all of ``key_type`` type
:raises TypeError: if ``value`` values are not all of ``value_type`` type
"""
if not isinstance(value, dict):
raise TypeError('"{}" must be a dictionary'.format(attribute.name))
for key, data in value.items():
if not isinstance(key, key_type):
raise TypeError(
'"{name}" dictionary keys must be of type "{type}"'.format(name=attribute.name, type=key_type)
)
if not isinstance(data, value_type):
raise TypeError(
'"{name}" dictionary values must be of type "{type}"'.format(name=attribute.name, type=value_type)
)
return _validate_dictionary | def function[dictionary_validator, parameter[key_type, value_type]]:
constant[Validator for ``attrs`` that performs deep type checking of dictionaries.]
def function[_validate_dictionary, parameter[instance, attribute, value]]:
constant[Validate that a dictionary is structured as expected.
:raises TypeError: if ``value`` is not a dictionary
:raises TypeError: if ``value`` keys are not all of ``key_type`` type
:raises TypeError: if ``value`` values are not all of ``value_type`` type
]
if <ast.UnaryOp object at 0x7da20c76da50> begin[:]
<ast.Raise object at 0x7da20c76e050>
for taget[tuple[[<ast.Name object at 0x7da20c76e260>, <ast.Name object at 0x7da20c76ec80>]]] in starred[call[name[value].items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da20c76c490> begin[:]
<ast.Raise object at 0x7da20c76ef80>
if <ast.UnaryOp object at 0x7da20c76dea0> begin[:]
<ast.Raise object at 0x7da20c76ce50>
return[name[_validate_dictionary]] | keyword[def] identifier[dictionary_validator] ( identifier[key_type] , identifier[value_type] ):
literal[string]
keyword[def] identifier[_validate_dictionary] ( identifier[instance] , identifier[attribute] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[dict] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[attribute] . identifier[name] ))
keyword[for] identifier[key] , identifier[data] keyword[in] identifier[value] . identifier[items] ():
keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[key_type] ):
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[name] = identifier[attribute] . identifier[name] , identifier[type] = identifier[key_type] )
)
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[value_type] ):
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[name] = identifier[attribute] . identifier[name] , identifier[type] = identifier[value_type] )
)
keyword[return] identifier[_validate_dictionary] | def dictionary_validator(key_type, value_type):
"""Validator for ``attrs`` that performs deep type checking of dictionaries."""
def _validate_dictionary(instance, attribute, value):
# pylint: disable=unused-argument
'Validate that a dictionary is structured as expected.\n\n :raises TypeError: if ``value`` is not a dictionary\n :raises TypeError: if ``value`` keys are not all of ``key_type`` type\n :raises TypeError: if ``value`` values are not all of ``value_type`` type\n '
if not isinstance(value, dict):
raise TypeError('"{}" must be a dictionary'.format(attribute.name)) # depends on [control=['if'], data=[]]
for (key, data) in value.items():
if not isinstance(key, key_type):
raise TypeError('"{name}" dictionary keys must be of type "{type}"'.format(name=attribute.name, type=key_type)) # depends on [control=['if'], data=[]]
if not isinstance(data, value_type):
raise TypeError('"{name}" dictionary values must be of type "{type}"'.format(name=attribute.name, type=value_type)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return _validate_dictionary |
def _search_keys(text, keyserver, user=None):
'''
Helper function for searching keys from keyserver
'''
gpg = _create_gpg(user)
if keyserver:
_keys = gpg.search_keys(text, keyserver)
else:
_keys = gpg.search_keys(text)
return _keys | def function[_search_keys, parameter[text, keyserver, user]]:
constant[
Helper function for searching keys from keyserver
]
variable[gpg] assign[=] call[name[_create_gpg], parameter[name[user]]]
if name[keyserver] begin[:]
variable[_keys] assign[=] call[name[gpg].search_keys, parameter[name[text], name[keyserver]]]
return[name[_keys]] | keyword[def] identifier[_search_keys] ( identifier[text] , identifier[keyserver] , identifier[user] = keyword[None] ):
literal[string]
identifier[gpg] = identifier[_create_gpg] ( identifier[user] )
keyword[if] identifier[keyserver] :
identifier[_keys] = identifier[gpg] . identifier[search_keys] ( identifier[text] , identifier[keyserver] )
keyword[else] :
identifier[_keys] = identifier[gpg] . identifier[search_keys] ( identifier[text] )
keyword[return] identifier[_keys] | def _search_keys(text, keyserver, user=None):
"""
Helper function for searching keys from keyserver
"""
gpg = _create_gpg(user)
if keyserver:
_keys = gpg.search_keys(text, keyserver) # depends on [control=['if'], data=[]]
else:
_keys = gpg.search_keys(text)
return _keys |
def single(method):
"""Decorator for RestServer methods that take a single address"""
@functools.wraps(method)
def single(self, address, value=None):
address = urllib.parse.unquote_plus(address)
try:
error = NO_PROJECT_ERROR
if not self.project:
raise ValueError
error = BAD_ADDRESS_ERROR
ed = editor.Editor(address, self.project)
if value is None:
error = BAD_GETTER_ERROR
result = method(self, ed)
else:
error = BAD_SETTER_ERROR
result = method(self, ed, value)
result = {'value': result}
except Exception as e:
traceback.print_exc()
msg = '%s\n%s' % (error.format(**locals()), e)
result = {'error': msg}
return flask.jsonify(result)
return single | def function[single, parameter[method]]:
constant[Decorator for RestServer methods that take a single address]
def function[single, parameter[self, address, value]]:
variable[address] assign[=] call[name[urllib].parse.unquote_plus, parameter[name[address]]]
<ast.Try object at 0x7da1b0062650>
return[call[name[flask].jsonify, parameter[name[result]]]]
return[name[single]] | keyword[def] identifier[single] ( identifier[method] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[method] )
keyword[def] identifier[single] ( identifier[self] , identifier[address] , identifier[value] = keyword[None] ):
identifier[address] = identifier[urllib] . identifier[parse] . identifier[unquote_plus] ( identifier[address] )
keyword[try] :
identifier[error] = identifier[NO_PROJECT_ERROR]
keyword[if] keyword[not] identifier[self] . identifier[project] :
keyword[raise] identifier[ValueError]
identifier[error] = identifier[BAD_ADDRESS_ERROR]
identifier[ed] = identifier[editor] . identifier[Editor] ( identifier[address] , identifier[self] . identifier[project] )
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[error] = identifier[BAD_GETTER_ERROR]
identifier[result] = identifier[method] ( identifier[self] , identifier[ed] )
keyword[else] :
identifier[error] = identifier[BAD_SETTER_ERROR]
identifier[result] = identifier[method] ( identifier[self] , identifier[ed] , identifier[value] )
identifier[result] ={ literal[string] : identifier[result] }
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[traceback] . identifier[print_exc] ()
identifier[msg] = literal[string] %( identifier[error] . identifier[format] (** identifier[locals] ()), identifier[e] )
identifier[result] ={ literal[string] : identifier[msg] }
keyword[return] identifier[flask] . identifier[jsonify] ( identifier[result] )
keyword[return] identifier[single] | def single(method):
"""Decorator for RestServer methods that take a single address"""
@functools.wraps(method)
def single(self, address, value=None):
address = urllib.parse.unquote_plus(address)
try:
error = NO_PROJECT_ERROR
if not self.project:
raise ValueError # depends on [control=['if'], data=[]]
error = BAD_ADDRESS_ERROR
ed = editor.Editor(address, self.project)
if value is None:
error = BAD_GETTER_ERROR
result = method(self, ed) # depends on [control=['if'], data=[]]
else:
error = BAD_SETTER_ERROR
result = method(self, ed, value)
result = {'value': result} # depends on [control=['try'], data=[]]
except Exception as e:
traceback.print_exc()
msg = '%s\n%s' % (error.format(**locals()), e)
result = {'error': msg} # depends on [control=['except'], data=['e']]
return flask.jsonify(result)
return single |
def _get_interleague_fl(cls, home_team_lg, away_team_lg):
"""
get inter league flg
:param home_team_lg: home team league
:param away_team_lg: away team league
:return: inter league flg(T or F or U)
"""
if (home_team_lg == MlbamConst.UNKNOWN_SHORT) or (away_team_lg == MlbamConst.UNKNOWN_SHORT):
return MlbamConst.UNKNOWN_SHORT
elif home_team_lg != away_team_lg:
return MlbamConst.FLG_TRUE
return MlbamConst.FLG_FALSE | def function[_get_interleague_fl, parameter[cls, home_team_lg, away_team_lg]]:
constant[
get inter league flg
:param home_team_lg: home team league
:param away_team_lg: away team league
:return: inter league flg(T or F or U)
]
if <ast.BoolOp object at 0x7da20c6e6290> begin[:]
return[name[MlbamConst].UNKNOWN_SHORT]
return[name[MlbamConst].FLG_FALSE] | keyword[def] identifier[_get_interleague_fl] ( identifier[cls] , identifier[home_team_lg] , identifier[away_team_lg] ):
literal[string]
keyword[if] ( identifier[home_team_lg] == identifier[MlbamConst] . identifier[UNKNOWN_SHORT] ) keyword[or] ( identifier[away_team_lg] == identifier[MlbamConst] . identifier[UNKNOWN_SHORT] ):
keyword[return] identifier[MlbamConst] . identifier[UNKNOWN_SHORT]
keyword[elif] identifier[home_team_lg] != identifier[away_team_lg] :
keyword[return] identifier[MlbamConst] . identifier[FLG_TRUE]
keyword[return] identifier[MlbamConst] . identifier[FLG_FALSE] | def _get_interleague_fl(cls, home_team_lg, away_team_lg):
"""
get inter league flg
:param home_team_lg: home team league
:param away_team_lg: away team league
:return: inter league flg(T or F or U)
"""
if home_team_lg == MlbamConst.UNKNOWN_SHORT or away_team_lg == MlbamConst.UNKNOWN_SHORT:
return MlbamConst.UNKNOWN_SHORT # depends on [control=['if'], data=[]]
elif home_team_lg != away_team_lg:
return MlbamConst.FLG_TRUE # depends on [control=['if'], data=[]]
return MlbamConst.FLG_FALSE |
def export_env_string(name, value):
# type: (AEnvName, AEnvValue) -> ADefine
"""Exports an environment variable with the given value"""
os.environ[name] = value
return Define(name, value) | def function[export_env_string, parameter[name, value]]:
constant[Exports an environment variable with the given value]
call[name[os].environ][name[name]] assign[=] name[value]
return[call[name[Define], parameter[name[name], name[value]]]] | keyword[def] identifier[export_env_string] ( identifier[name] , identifier[value] ):
literal[string]
identifier[os] . identifier[environ] [ identifier[name] ]= identifier[value]
keyword[return] identifier[Define] ( identifier[name] , identifier[value] ) | def export_env_string(name, value):
# type: (AEnvName, AEnvValue) -> ADefine
'Exports an environment variable with the given value'
os.environ[name] = value
return Define(name, value) |
def save_reg(data):
'''
Save the register to msgpack files
'''
reg_dir = _reg_dir()
regfile = os.path.join(reg_dir, 'register')
try:
if not os.path.exists(reg_dir):
os.makedirs(reg_dir)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
try:
with salt.utils.files.fopen(regfile, 'a') as fh_:
salt.utils.msgpack.dump(data, fh_)
except Exception:
log.error('Could not write to msgpack file %s', __opts__['outdir'])
raise | def function[save_reg, parameter[data]]:
constant[
Save the register to msgpack files
]
variable[reg_dir] assign[=] call[name[_reg_dir], parameter[]]
variable[regfile] assign[=] call[name[os].path.join, parameter[name[reg_dir], constant[register]]]
<ast.Try object at 0x7da207f01ab0>
<ast.Try object at 0x7da207f00ca0> | keyword[def] identifier[save_reg] ( identifier[data] ):
literal[string]
identifier[reg_dir] = identifier[_reg_dir] ()
identifier[regfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[reg_dir] , literal[string] )
keyword[try] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[reg_dir] ):
identifier[os] . identifier[makedirs] ( identifier[reg_dir] )
keyword[except] identifier[OSError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] == identifier[errno] . identifier[EEXIST] :
keyword[pass]
keyword[else] :
keyword[raise]
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[regfile] , literal[string] ) keyword[as] identifier[fh_] :
identifier[salt] . identifier[utils] . identifier[msgpack] . identifier[dump] ( identifier[data] , identifier[fh_] )
keyword[except] identifier[Exception] :
identifier[log] . identifier[error] ( literal[string] , identifier[__opts__] [ literal[string] ])
keyword[raise] | def save_reg(data):
"""
Save the register to msgpack files
"""
reg_dir = _reg_dir()
regfile = os.path.join(reg_dir, 'register')
try:
if not os.path.exists(reg_dir):
os.makedirs(reg_dir) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except OSError as exc:
if exc.errno == errno.EEXIST:
pass # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['exc']]
try:
with salt.utils.files.fopen(regfile, 'a') as fh_:
salt.utils.msgpack.dump(data, fh_) # depends on [control=['with'], data=['fh_']] # depends on [control=['try'], data=[]]
except Exception:
log.error('Could not write to msgpack file %s', __opts__['outdir'])
raise # depends on [control=['except'], data=[]] |
def build_set(id=None, name=None, temporary_build=False, timestamp_alignment=False,
force=False, rebuild_mode=common.REBUILD_MODES_DEFAULT, **kwargs):
"""
Start a build of the given BuildConfigurationSet
"""
content = build_set_raw(id, name,
temporary_build, timestamp_alignment, force, rebuild_mode, **kwargs)
if content:
return utils.format_json(content) | def function[build_set, parameter[id, name, temporary_build, timestamp_alignment, force, rebuild_mode]]:
constant[
Start a build of the given BuildConfigurationSet
]
variable[content] assign[=] call[name[build_set_raw], parameter[name[id], name[name], name[temporary_build], name[timestamp_alignment], name[force], name[rebuild_mode]]]
if name[content] begin[:]
return[call[name[utils].format_json, parameter[name[content]]]] | keyword[def] identifier[build_set] ( identifier[id] = keyword[None] , identifier[name] = keyword[None] , identifier[temporary_build] = keyword[False] , identifier[timestamp_alignment] = keyword[False] ,
identifier[force] = keyword[False] , identifier[rebuild_mode] = identifier[common] . identifier[REBUILD_MODES_DEFAULT] ,** identifier[kwargs] ):
literal[string]
identifier[content] = identifier[build_set_raw] ( identifier[id] , identifier[name] ,
identifier[temporary_build] , identifier[timestamp_alignment] , identifier[force] , identifier[rebuild_mode] ,** identifier[kwargs] )
keyword[if] identifier[content] :
keyword[return] identifier[utils] . identifier[format_json] ( identifier[content] ) | def build_set(id=None, name=None, temporary_build=False, timestamp_alignment=False, force=False, rebuild_mode=common.REBUILD_MODES_DEFAULT, **kwargs):
"""
Start a build of the given BuildConfigurationSet
"""
content = build_set_raw(id, name, temporary_build, timestamp_alignment, force, rebuild_mode, **kwargs)
if content:
return utils.format_json(content) # depends on [control=['if'], data=[]] |
def difference_update(self, other):
"""Remove all elements of another set from this set."""
return self.client.sdiffstore(self.name, [self.name, other.name]) | def function[difference_update, parameter[self, other]]:
constant[Remove all elements of another set from this set.]
return[call[name[self].client.sdiffstore, parameter[name[self].name, list[[<ast.Attribute object at 0x7da18ede4100>, <ast.Attribute object at 0x7da18ede5cf0>]]]]] | keyword[def] identifier[difference_update] ( identifier[self] , identifier[other] ):
literal[string]
keyword[return] identifier[self] . identifier[client] . identifier[sdiffstore] ( identifier[self] . identifier[name] ,[ identifier[self] . identifier[name] , identifier[other] . identifier[name] ]) | def difference_update(self, other):
"""Remove all elements of another set from this set."""
return self.client.sdiffstore(self.name, [self.name, other.name]) |
def ReadRaster(self, *args, **kwargs):
"""Returns raster data bytes for partial or full extent.
Overrides gdal.Dataset.ReadRaster() with the full raster size by
default.
"""
args = args or (0, 0, self.ds.RasterXSize, self.ds.RasterYSize)
return self.ds.ReadRaster(*args, **kwargs) | def function[ReadRaster, parameter[self]]:
constant[Returns raster data bytes for partial or full extent.
Overrides gdal.Dataset.ReadRaster() with the full raster size by
default.
]
variable[args] assign[=] <ast.BoolOp object at 0x7da1b021d720>
return[call[name[self].ds.ReadRaster, parameter[<ast.Starred object at 0x7da204960580>]]] | keyword[def] identifier[ReadRaster] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[args] = identifier[args] keyword[or] ( literal[int] , literal[int] , identifier[self] . identifier[ds] . identifier[RasterXSize] , identifier[self] . identifier[ds] . identifier[RasterYSize] )
keyword[return] identifier[self] . identifier[ds] . identifier[ReadRaster] (* identifier[args] ,** identifier[kwargs] ) | def ReadRaster(self, *args, **kwargs):
"""Returns raster data bytes for partial or full extent.
Overrides gdal.Dataset.ReadRaster() with the full raster size by
default.
"""
args = args or (0, 0, self.ds.RasterXSize, self.ds.RasterYSize)
return self.ds.ReadRaster(*args, **kwargs) |
def _process_params(self, params):
""" Converts Unicode/lists/booleans inside HTTP parameters """
processed_params = {}
for key, value in params.items():
processed_params[key] = self._process_param_value(value)
return processed_params | def function[_process_params, parameter[self, params]]:
constant[ Converts Unicode/lists/booleans inside HTTP parameters ]
variable[processed_params] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b09a8b80>, <ast.Name object at 0x7da1b09a9390>]]] in starred[call[name[params].items, parameter[]]] begin[:]
call[name[processed_params]][name[key]] assign[=] call[name[self]._process_param_value, parameter[name[value]]]
return[name[processed_params]] | keyword[def] identifier[_process_params] ( identifier[self] , identifier[params] ):
literal[string]
identifier[processed_params] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[params] . identifier[items] ():
identifier[processed_params] [ identifier[key] ]= identifier[self] . identifier[_process_param_value] ( identifier[value] )
keyword[return] identifier[processed_params] | def _process_params(self, params):
""" Converts Unicode/lists/booleans inside HTTP parameters """
processed_params = {}
for (key, value) in params.items():
processed_params[key] = self._process_param_value(value) # depends on [control=['for'], data=[]]
return processed_params |
def move(self, source, dest):
"""
the semantic should be like unix 'mv' command.
Unfortunatelly, shutil.move does work differently!!!
Consider (all paths point to directories)
mv /a/b /a/c
expected outcome:
case 1.: 'c' does not exist:
b moved over to /a such that /a/c is what was /a/b/ before
case 2.: 'c' does exist:
b is moved into '/a/c/' such that we have now '/a/c/b'
But shutil.move will use os.rename whenever possible which means that
'/a/b' is renamed to '/a/c'. The outcome is that the content from b
ends up in c.
"""
if dest.scheme == 'file':
if source.isdir() and dest.isdir():
dest /= source.basename()
return shutil.move(source.path, dest.path)
else:
return super(LocalFileSystem, self).move(source, dest) | def function[move, parameter[self, source, dest]]:
constant[
the semantic should be like unix 'mv' command.
Unfortunatelly, shutil.move does work differently!!!
Consider (all paths point to directories)
mv /a/b /a/c
expected outcome:
case 1.: 'c' does not exist:
b moved over to /a such that /a/c is what was /a/b/ before
case 2.: 'c' does exist:
b is moved into '/a/c/' such that we have now '/a/c/b'
But shutil.move will use os.rename whenever possible which means that
'/a/b' is renamed to '/a/c'. The outcome is that the content from b
ends up in c.
]
if compare[name[dest].scheme equal[==] constant[file]] begin[:]
if <ast.BoolOp object at 0x7da2054a4ee0> begin[:]
<ast.AugAssign object at 0x7da2054a7970>
return[call[name[shutil].move, parameter[name[source].path, name[dest].path]]] | keyword[def] identifier[move] ( identifier[self] , identifier[source] , identifier[dest] ):
literal[string]
keyword[if] identifier[dest] . identifier[scheme] == literal[string] :
keyword[if] identifier[source] . identifier[isdir] () keyword[and] identifier[dest] . identifier[isdir] ():
identifier[dest] /= identifier[source] . identifier[basename] ()
keyword[return] identifier[shutil] . identifier[move] ( identifier[source] . identifier[path] , identifier[dest] . identifier[path] )
keyword[else] :
keyword[return] identifier[super] ( identifier[LocalFileSystem] , identifier[self] ). identifier[move] ( identifier[source] , identifier[dest] ) | def move(self, source, dest):
"""
the semantic should be like unix 'mv' command.
Unfortunatelly, shutil.move does work differently!!!
Consider (all paths point to directories)
mv /a/b /a/c
expected outcome:
case 1.: 'c' does not exist:
b moved over to /a such that /a/c is what was /a/b/ before
case 2.: 'c' does exist:
b is moved into '/a/c/' such that we have now '/a/c/b'
But shutil.move will use os.rename whenever possible which means that
'/a/b' is renamed to '/a/c'. The outcome is that the content from b
ends up in c.
"""
if dest.scheme == 'file':
if source.isdir() and dest.isdir():
dest /= source.basename() # depends on [control=['if'], data=[]]
return shutil.move(source.path, dest.path) # depends on [control=['if'], data=[]]
else:
return super(LocalFileSystem, self).move(source, dest) |
def runas(cmdLine, username, password=None, cwd=None):
'''
Run a command as another user. If the process is running as an admin or
system account this method does not require a password. Other non
privileged accounts need to provide a password for the user to runas.
Commands are run in with the highest level privileges possible for the
account provided.
'''
# Elevate the token from the current process
access = (
win32security.TOKEN_QUERY |
win32security.TOKEN_ADJUST_PRIVILEGES
)
th = win32security.OpenProcessToken(win32api.GetCurrentProcess(), access)
salt.platform.win.elevate_token(th)
# Try to impersonate the SYSTEM user. This process needs to be running as a
# user who as been granted the SeImpersonatePrivilege, Administrator
# accounts have this permission by default.
try:
impersonation_token = salt.platform.win.impersonate_sid(
salt.platform.win.SYSTEM_SID,
session_id=0,
privs=['SeTcbPrivilege'],
)
except WindowsError: # pylint: disable=undefined-variable
log.debug("Unable to impersonate SYSTEM user")
impersonation_token = None
# Impersonation of the SYSTEM user failed. Fallback to an un-privileged
# runas.
if not impersonation_token:
log.debug("No impersonation token, using unprivileged runas")
return runas_unpriv(cmdLine, username, password, cwd)
username, domain = split_username(username)
# Validate the domain and sid exist for the username
try:
_, domain, _ = win32security.LookupAccountName(domain, username)
except pywintypes.error as exc:
message = win32api.FormatMessage(exc.winerror).rstrip('\n')
raise CommandExecutionError(message)
if domain == 'NT AUTHORITY':
# Logon as a system level account, SYSTEM, LOCAL SERVICE, or NETWORK
# SERVICE.
logonType = win32con.LOGON32_LOGON_SERVICE
user_token = win32security.LogonUser(
username,
domain,
'',
win32con.LOGON32_LOGON_SERVICE,
win32con.LOGON32_PROVIDER_DEFAULT,
)
elif password:
# Login with a password.
user_token = win32security.LogonUser(
username,
domain,
password,
win32con.LOGON32_LOGON_INTERACTIVE,
win32con.LOGON32_PROVIDER_DEFAULT,
)
else:
# Login without a password. This always returns an elevated token.
user_token = salt.platform.win.logon_msv1_s4u(username).Token
# Get a linked user token to elevate if needed
elevation_type = win32security.GetTokenInformation(
user_token, win32security.TokenElevationType
)
if elevation_type > 1:
user_token = win32security.GetTokenInformation(
user_token,
win32security.TokenLinkedToken
)
# Elevate the user token
salt.platform.win.elevate_token(user_token)
# Make sure the user's token has access to a windows station and desktop
salt.platform.win.grant_winsta_and_desktop(user_token)
# Create pipes for standard in, out and error streams
security_attributes = win32security.SECURITY_ATTRIBUTES()
security_attributes.bInheritHandle = 1
stdin_read, stdin_write = win32pipe.CreatePipe(security_attributes, 0)
stdin_read = salt.platform.win.make_inheritable(stdin_read)
stdout_read, stdout_write = win32pipe.CreatePipe(security_attributes, 0)
stdout_write = salt.platform.win.make_inheritable(stdout_write)
stderr_read, stderr_write = win32pipe.CreatePipe(security_attributes, 0)
stderr_write = salt.platform.win.make_inheritable(stderr_write)
# Run the process without showing a window.
creationflags = (
win32process.CREATE_NO_WINDOW |
win32process.CREATE_NEW_CONSOLE |
win32process.CREATE_SUSPENDED
)
startup_info = salt.platform.win.STARTUPINFO(
dwFlags=win32con.STARTF_USESTDHANDLES,
hStdInput=stdin_read.handle,
hStdOutput=stdout_write.handle,
hStdError=stderr_write.handle,
)
# Create the environment for the user
env = win32profile.CreateEnvironmentBlock(user_token, False)
# Start the process in a suspended state.
process_info = salt.platform.win.CreateProcessWithTokenW(
int(user_token),
logonflags=1,
applicationname=None,
commandline=cmdLine,
currentdirectory=cwd,
creationflags=creationflags,
startupinfo=startup_info,
environment=env,
)
hProcess = process_info.hProcess
hThread = process_info.hThread
dwProcessId = process_info.dwProcessId
dwThreadId = process_info.dwThreadId
salt.platform.win.kernel32.CloseHandle(stdin_write.handle)
salt.platform.win.kernel32.CloseHandle(stdout_write.handle)
salt.platform.win.kernel32.CloseHandle(stderr_write.handle)
ret = {'pid': dwProcessId}
# Resume the process
psutil.Process(dwProcessId).resume()
# Wait for the process to exit and get it's return code.
if win32event.WaitForSingleObject(hProcess, win32event.INFINITE) == win32con.WAIT_OBJECT_0:
exitcode = win32process.GetExitCodeProcess(hProcess)
ret['retcode'] = exitcode
# Read standard out
fd_out = msvcrt.open_osfhandle(stdout_read.handle, os.O_RDONLY | os.O_TEXT)
with os.fdopen(fd_out, 'r') as f_out:
stdout = f_out.read()
ret['stdout'] = stdout
# Read standard error
fd_err = msvcrt.open_osfhandle(stderr_read.handle, os.O_RDONLY | os.O_TEXT)
with os.fdopen(fd_err, 'r') as f_err:
stderr = f_err.read()
ret['stderr'] = stderr
salt.platform.win.kernel32.CloseHandle(hProcess)
win32api.CloseHandle(user_token)
if impersonation_token:
win32security.RevertToSelf()
win32api.CloseHandle(impersonation_token)
return ret | def function[runas, parameter[cmdLine, username, password, cwd]]:
constant[
Run a command as another user. If the process is running as an admin or
system account this method does not require a password. Other non
privileged accounts need to provide a password for the user to runas.
Commands are run in with the highest level privileges possible for the
account provided.
]
variable[access] assign[=] binary_operation[name[win32security].TOKEN_QUERY <ast.BitOr object at 0x7da2590d6aa0> name[win32security].TOKEN_ADJUST_PRIVILEGES]
variable[th] assign[=] call[name[win32security].OpenProcessToken, parameter[call[name[win32api].GetCurrentProcess, parameter[]], name[access]]]
call[name[salt].platform.win.elevate_token, parameter[name[th]]]
<ast.Try object at 0x7da20e954fa0>
if <ast.UnaryOp object at 0x7da20e957010> begin[:]
call[name[log].debug, parameter[constant[No impersonation token, using unprivileged runas]]]
return[call[name[runas_unpriv], parameter[name[cmdLine], name[username], name[password], name[cwd]]]]
<ast.Tuple object at 0x7da20e9548e0> assign[=] call[name[split_username], parameter[name[username]]]
<ast.Try object at 0x7da20e956440>
if compare[name[domain] equal[==] constant[NT AUTHORITY]] begin[:]
variable[logonType] assign[=] name[win32con].LOGON32_LOGON_SERVICE
variable[user_token] assign[=] call[name[win32security].LogonUser, parameter[name[username], name[domain], constant[], name[win32con].LOGON32_LOGON_SERVICE, name[win32con].LOGON32_PROVIDER_DEFAULT]]
variable[elevation_type] assign[=] call[name[win32security].GetTokenInformation, parameter[name[user_token], name[win32security].TokenElevationType]]
if compare[name[elevation_type] greater[>] constant[1]] begin[:]
variable[user_token] assign[=] call[name[win32security].GetTokenInformation, parameter[name[user_token], name[win32security].TokenLinkedToken]]
call[name[salt].platform.win.elevate_token, parameter[name[user_token]]]
call[name[salt].platform.win.grant_winsta_and_desktop, parameter[name[user_token]]]
variable[security_attributes] assign[=] call[name[win32security].SECURITY_ATTRIBUTES, parameter[]]
name[security_attributes].bInheritHandle assign[=] constant[1]
<ast.Tuple object at 0x7da2046227d0> assign[=] call[name[win32pipe].CreatePipe, parameter[name[security_attributes], constant[0]]]
variable[stdin_read] assign[=] call[name[salt].platform.win.make_inheritable, parameter[name[stdin_read]]]
<ast.Tuple object at 0x7da2046210c0> assign[=] call[name[win32pipe].CreatePipe, parameter[name[security_attributes], constant[0]]]
variable[stdout_write] assign[=] call[name[salt].platform.win.make_inheritable, parameter[name[stdout_write]]]
<ast.Tuple object at 0x7da204347550> assign[=] call[name[win32pipe].CreatePipe, parameter[name[security_attributes], constant[0]]]
variable[stderr_write] assign[=] call[name[salt].platform.win.make_inheritable, parameter[name[stderr_write]]]
variable[creationflags] assign[=] binary_operation[binary_operation[name[win32process].CREATE_NO_WINDOW <ast.BitOr object at 0x7da2590d6aa0> name[win32process].CREATE_NEW_CONSOLE] <ast.BitOr object at 0x7da2590d6aa0> name[win32process].CREATE_SUSPENDED]
variable[startup_info] assign[=] call[name[salt].platform.win.STARTUPINFO, parameter[]]
variable[env] assign[=] call[name[win32profile].CreateEnvironmentBlock, parameter[name[user_token], constant[False]]]
variable[process_info] assign[=] call[name[salt].platform.win.CreateProcessWithTokenW, parameter[call[name[int], parameter[name[user_token]]]]]
variable[hProcess] assign[=] name[process_info].hProcess
variable[hThread] assign[=] name[process_info].hThread
variable[dwProcessId] assign[=] name[process_info].dwProcessId
variable[dwThreadId] assign[=] name[process_info].dwThreadId
call[name[salt].platform.win.kernel32.CloseHandle, parameter[name[stdin_write].handle]]
call[name[salt].platform.win.kernel32.CloseHandle, parameter[name[stdout_write].handle]]
call[name[salt].platform.win.kernel32.CloseHandle, parameter[name[stderr_write].handle]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da204346e30>], [<ast.Name object at 0x7da204347c40>]]
call[call[name[psutil].Process, parameter[name[dwProcessId]]].resume, parameter[]]
if compare[call[name[win32event].WaitForSingleObject, parameter[name[hProcess], name[win32event].INFINITE]] equal[==] name[win32con].WAIT_OBJECT_0] begin[:]
variable[exitcode] assign[=] call[name[win32process].GetExitCodeProcess, parameter[name[hProcess]]]
call[name[ret]][constant[retcode]] assign[=] name[exitcode]
variable[fd_out] assign[=] call[name[msvcrt].open_osfhandle, parameter[name[stdout_read].handle, binary_operation[name[os].O_RDONLY <ast.BitOr object at 0x7da2590d6aa0> name[os].O_TEXT]]]
with call[name[os].fdopen, parameter[name[fd_out], constant[r]]] begin[:]
variable[stdout] assign[=] call[name[f_out].read, parameter[]]
call[name[ret]][constant[stdout]] assign[=] name[stdout]
variable[fd_err] assign[=] call[name[msvcrt].open_osfhandle, parameter[name[stderr_read].handle, binary_operation[name[os].O_RDONLY <ast.BitOr object at 0x7da2590d6aa0> name[os].O_TEXT]]]
with call[name[os].fdopen, parameter[name[fd_err], constant[r]]] begin[:]
variable[stderr] assign[=] call[name[f_err].read, parameter[]]
call[name[ret]][constant[stderr]] assign[=] name[stderr]
call[name[salt].platform.win.kernel32.CloseHandle, parameter[name[hProcess]]]
call[name[win32api].CloseHandle, parameter[name[user_token]]]
if name[impersonation_token] begin[:]
call[name[win32security].RevertToSelf, parameter[]]
call[name[win32api].CloseHandle, parameter[name[impersonation_token]]]
return[name[ret]] | keyword[def] identifier[runas] ( identifier[cmdLine] , identifier[username] , identifier[password] = keyword[None] , identifier[cwd] = keyword[None] ):
literal[string]
identifier[access] =(
identifier[win32security] . identifier[TOKEN_QUERY] |
identifier[win32security] . identifier[TOKEN_ADJUST_PRIVILEGES]
)
identifier[th] = identifier[win32security] . identifier[OpenProcessToken] ( identifier[win32api] . identifier[GetCurrentProcess] (), identifier[access] )
identifier[salt] . identifier[platform] . identifier[win] . identifier[elevate_token] ( identifier[th] )
keyword[try] :
identifier[impersonation_token] = identifier[salt] . identifier[platform] . identifier[win] . identifier[impersonate_sid] (
identifier[salt] . identifier[platform] . identifier[win] . identifier[SYSTEM_SID] ,
identifier[session_id] = literal[int] ,
identifier[privs] =[ literal[string] ],
)
keyword[except] identifier[WindowsError] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[impersonation_token] = keyword[None]
keyword[if] keyword[not] identifier[impersonation_token] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[return] identifier[runas_unpriv] ( identifier[cmdLine] , identifier[username] , identifier[password] , identifier[cwd] )
identifier[username] , identifier[domain] = identifier[split_username] ( identifier[username] )
keyword[try] :
identifier[_] , identifier[domain] , identifier[_] = identifier[win32security] . identifier[LookupAccountName] ( identifier[domain] , identifier[username] )
keyword[except] identifier[pywintypes] . identifier[error] keyword[as] identifier[exc] :
identifier[message] = identifier[win32api] . identifier[FormatMessage] ( identifier[exc] . identifier[winerror] ). identifier[rstrip] ( literal[string] )
keyword[raise] identifier[CommandExecutionError] ( identifier[message] )
keyword[if] identifier[domain] == literal[string] :
identifier[logonType] = identifier[win32con] . identifier[LOGON32_LOGON_SERVICE]
identifier[user_token] = identifier[win32security] . identifier[LogonUser] (
identifier[username] ,
identifier[domain] ,
literal[string] ,
identifier[win32con] . identifier[LOGON32_LOGON_SERVICE] ,
identifier[win32con] . identifier[LOGON32_PROVIDER_DEFAULT] ,
)
keyword[elif] identifier[password] :
identifier[user_token] = identifier[win32security] . identifier[LogonUser] (
identifier[username] ,
identifier[domain] ,
identifier[password] ,
identifier[win32con] . identifier[LOGON32_LOGON_INTERACTIVE] ,
identifier[win32con] . identifier[LOGON32_PROVIDER_DEFAULT] ,
)
keyword[else] :
identifier[user_token] = identifier[salt] . identifier[platform] . identifier[win] . identifier[logon_msv1_s4u] ( identifier[username] ). identifier[Token]
identifier[elevation_type] = identifier[win32security] . identifier[GetTokenInformation] (
identifier[user_token] , identifier[win32security] . identifier[TokenElevationType]
)
keyword[if] identifier[elevation_type] > literal[int] :
identifier[user_token] = identifier[win32security] . identifier[GetTokenInformation] (
identifier[user_token] ,
identifier[win32security] . identifier[TokenLinkedToken]
)
identifier[salt] . identifier[platform] . identifier[win] . identifier[elevate_token] ( identifier[user_token] )
identifier[salt] . identifier[platform] . identifier[win] . identifier[grant_winsta_and_desktop] ( identifier[user_token] )
identifier[security_attributes] = identifier[win32security] . identifier[SECURITY_ATTRIBUTES] ()
identifier[security_attributes] . identifier[bInheritHandle] = literal[int]
identifier[stdin_read] , identifier[stdin_write] = identifier[win32pipe] . identifier[CreatePipe] ( identifier[security_attributes] , literal[int] )
identifier[stdin_read] = identifier[salt] . identifier[platform] . identifier[win] . identifier[make_inheritable] ( identifier[stdin_read] )
identifier[stdout_read] , identifier[stdout_write] = identifier[win32pipe] . identifier[CreatePipe] ( identifier[security_attributes] , literal[int] )
identifier[stdout_write] = identifier[salt] . identifier[platform] . identifier[win] . identifier[make_inheritable] ( identifier[stdout_write] )
identifier[stderr_read] , identifier[stderr_write] = identifier[win32pipe] . identifier[CreatePipe] ( identifier[security_attributes] , literal[int] )
identifier[stderr_write] = identifier[salt] . identifier[platform] . identifier[win] . identifier[make_inheritable] ( identifier[stderr_write] )
identifier[creationflags] =(
identifier[win32process] . identifier[CREATE_NO_WINDOW] |
identifier[win32process] . identifier[CREATE_NEW_CONSOLE] |
identifier[win32process] . identifier[CREATE_SUSPENDED]
)
identifier[startup_info] = identifier[salt] . identifier[platform] . identifier[win] . identifier[STARTUPINFO] (
identifier[dwFlags] = identifier[win32con] . identifier[STARTF_USESTDHANDLES] ,
identifier[hStdInput] = identifier[stdin_read] . identifier[handle] ,
identifier[hStdOutput] = identifier[stdout_write] . identifier[handle] ,
identifier[hStdError] = identifier[stderr_write] . identifier[handle] ,
)
identifier[env] = identifier[win32profile] . identifier[CreateEnvironmentBlock] ( identifier[user_token] , keyword[False] )
identifier[process_info] = identifier[salt] . identifier[platform] . identifier[win] . identifier[CreateProcessWithTokenW] (
identifier[int] ( identifier[user_token] ),
identifier[logonflags] = literal[int] ,
identifier[applicationname] = keyword[None] ,
identifier[commandline] = identifier[cmdLine] ,
identifier[currentdirectory] = identifier[cwd] ,
identifier[creationflags] = identifier[creationflags] ,
identifier[startupinfo] = identifier[startup_info] ,
identifier[environment] = identifier[env] ,
)
identifier[hProcess] = identifier[process_info] . identifier[hProcess]
identifier[hThread] = identifier[process_info] . identifier[hThread]
identifier[dwProcessId] = identifier[process_info] . identifier[dwProcessId]
identifier[dwThreadId] = identifier[process_info] . identifier[dwThreadId]
identifier[salt] . identifier[platform] . identifier[win] . identifier[kernel32] . identifier[CloseHandle] ( identifier[stdin_write] . identifier[handle] )
identifier[salt] . identifier[platform] . identifier[win] . identifier[kernel32] . identifier[CloseHandle] ( identifier[stdout_write] . identifier[handle] )
identifier[salt] . identifier[platform] . identifier[win] . identifier[kernel32] . identifier[CloseHandle] ( identifier[stderr_write] . identifier[handle] )
identifier[ret] ={ literal[string] : identifier[dwProcessId] }
identifier[psutil] . identifier[Process] ( identifier[dwProcessId] ). identifier[resume] ()
keyword[if] identifier[win32event] . identifier[WaitForSingleObject] ( identifier[hProcess] , identifier[win32event] . identifier[INFINITE] )== identifier[win32con] . identifier[WAIT_OBJECT_0] :
identifier[exitcode] = identifier[win32process] . identifier[GetExitCodeProcess] ( identifier[hProcess] )
identifier[ret] [ literal[string] ]= identifier[exitcode]
identifier[fd_out] = identifier[msvcrt] . identifier[open_osfhandle] ( identifier[stdout_read] . identifier[handle] , identifier[os] . identifier[O_RDONLY] | identifier[os] . identifier[O_TEXT] )
keyword[with] identifier[os] . identifier[fdopen] ( identifier[fd_out] , literal[string] ) keyword[as] identifier[f_out] :
identifier[stdout] = identifier[f_out] . identifier[read] ()
identifier[ret] [ literal[string] ]= identifier[stdout]
identifier[fd_err] = identifier[msvcrt] . identifier[open_osfhandle] ( identifier[stderr_read] . identifier[handle] , identifier[os] . identifier[O_RDONLY] | identifier[os] . identifier[O_TEXT] )
keyword[with] identifier[os] . identifier[fdopen] ( identifier[fd_err] , literal[string] ) keyword[as] identifier[f_err] :
identifier[stderr] = identifier[f_err] . identifier[read] ()
identifier[ret] [ literal[string] ]= identifier[stderr]
identifier[salt] . identifier[platform] . identifier[win] . identifier[kernel32] . identifier[CloseHandle] ( identifier[hProcess] )
identifier[win32api] . identifier[CloseHandle] ( identifier[user_token] )
keyword[if] identifier[impersonation_token] :
identifier[win32security] . identifier[RevertToSelf] ()
identifier[win32api] . identifier[CloseHandle] ( identifier[impersonation_token] )
keyword[return] identifier[ret] | def runas(cmdLine, username, password=None, cwd=None):
"""
Run a command as another user. If the process is running as an admin or
system account this method does not require a password. Other non
privileged accounts need to provide a password for the user to runas.
Commands are run in with the highest level privileges possible for the
account provided.
"""
# Elevate the token from the current process
access = win32security.TOKEN_QUERY | win32security.TOKEN_ADJUST_PRIVILEGES
th = win32security.OpenProcessToken(win32api.GetCurrentProcess(), access)
salt.platform.win.elevate_token(th)
# Try to impersonate the SYSTEM user. This process needs to be running as a
# user who as been granted the SeImpersonatePrivilege, Administrator
# accounts have this permission by default.
try:
impersonation_token = salt.platform.win.impersonate_sid(salt.platform.win.SYSTEM_SID, session_id=0, privs=['SeTcbPrivilege']) # depends on [control=['try'], data=[]]
except WindowsError: # pylint: disable=undefined-variable
log.debug('Unable to impersonate SYSTEM user')
impersonation_token = None # depends on [control=['except'], data=[]]
# Impersonation of the SYSTEM user failed. Fallback to an un-privileged
# runas.
if not impersonation_token:
log.debug('No impersonation token, using unprivileged runas')
return runas_unpriv(cmdLine, username, password, cwd) # depends on [control=['if'], data=[]]
(username, domain) = split_username(username)
# Validate the domain and sid exist for the username
try:
(_, domain, _) = win32security.LookupAccountName(domain, username) # depends on [control=['try'], data=[]]
except pywintypes.error as exc:
message = win32api.FormatMessage(exc.winerror).rstrip('\n')
raise CommandExecutionError(message) # depends on [control=['except'], data=['exc']]
if domain == 'NT AUTHORITY':
# Logon as a system level account, SYSTEM, LOCAL SERVICE, or NETWORK
# SERVICE.
logonType = win32con.LOGON32_LOGON_SERVICE
user_token = win32security.LogonUser(username, domain, '', win32con.LOGON32_LOGON_SERVICE, win32con.LOGON32_PROVIDER_DEFAULT) # depends on [control=['if'], data=['domain']]
elif password:
# Login with a password.
user_token = win32security.LogonUser(username, domain, password, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT) # depends on [control=['if'], data=[]]
else:
# Login without a password. This always returns an elevated token.
user_token = salt.platform.win.logon_msv1_s4u(username).Token
# Get a linked user token to elevate if needed
elevation_type = win32security.GetTokenInformation(user_token, win32security.TokenElevationType)
if elevation_type > 1:
user_token = win32security.GetTokenInformation(user_token, win32security.TokenLinkedToken) # depends on [control=['if'], data=[]]
# Elevate the user token
salt.platform.win.elevate_token(user_token)
# Make sure the user's token has access to a windows station and desktop
salt.platform.win.grant_winsta_and_desktop(user_token)
# Create pipes for standard in, out and error streams
security_attributes = win32security.SECURITY_ATTRIBUTES()
security_attributes.bInheritHandle = 1
(stdin_read, stdin_write) = win32pipe.CreatePipe(security_attributes, 0)
stdin_read = salt.platform.win.make_inheritable(stdin_read)
(stdout_read, stdout_write) = win32pipe.CreatePipe(security_attributes, 0)
stdout_write = salt.platform.win.make_inheritable(stdout_write)
(stderr_read, stderr_write) = win32pipe.CreatePipe(security_attributes, 0)
stderr_write = salt.platform.win.make_inheritable(stderr_write)
# Run the process without showing a window.
creationflags = win32process.CREATE_NO_WINDOW | win32process.CREATE_NEW_CONSOLE | win32process.CREATE_SUSPENDED
startup_info = salt.platform.win.STARTUPINFO(dwFlags=win32con.STARTF_USESTDHANDLES, hStdInput=stdin_read.handle, hStdOutput=stdout_write.handle, hStdError=stderr_write.handle)
# Create the environment for the user
env = win32profile.CreateEnvironmentBlock(user_token, False)
# Start the process in a suspended state.
process_info = salt.platform.win.CreateProcessWithTokenW(int(user_token), logonflags=1, applicationname=None, commandline=cmdLine, currentdirectory=cwd, creationflags=creationflags, startupinfo=startup_info, environment=env)
hProcess = process_info.hProcess
hThread = process_info.hThread
dwProcessId = process_info.dwProcessId
dwThreadId = process_info.dwThreadId
salt.platform.win.kernel32.CloseHandle(stdin_write.handle)
salt.platform.win.kernel32.CloseHandle(stdout_write.handle)
salt.platform.win.kernel32.CloseHandle(stderr_write.handle)
ret = {'pid': dwProcessId}
# Resume the process
psutil.Process(dwProcessId).resume()
# Wait for the process to exit and get it's return code.
if win32event.WaitForSingleObject(hProcess, win32event.INFINITE) == win32con.WAIT_OBJECT_0:
exitcode = win32process.GetExitCodeProcess(hProcess)
ret['retcode'] = exitcode # depends on [control=['if'], data=[]]
# Read standard out
fd_out = msvcrt.open_osfhandle(stdout_read.handle, os.O_RDONLY | os.O_TEXT)
with os.fdopen(fd_out, 'r') as f_out:
stdout = f_out.read()
ret['stdout'] = stdout # depends on [control=['with'], data=['f_out']]
# Read standard error
fd_err = msvcrt.open_osfhandle(stderr_read.handle, os.O_RDONLY | os.O_TEXT)
with os.fdopen(fd_err, 'r') as f_err:
stderr = f_err.read()
ret['stderr'] = stderr # depends on [control=['with'], data=['f_err']]
salt.platform.win.kernel32.CloseHandle(hProcess)
win32api.CloseHandle(user_token)
if impersonation_token:
win32security.RevertToSelf() # depends on [control=['if'], data=[]]
win32api.CloseHandle(impersonation_token)
return ret |
def Grid(
pos=(0, 0, 0),
normal=(0, 0, 1),
sx=1,
sy=1,
c="g",
bc="darkgreen",
lw=1,
alpha=1,
resx=10,
resy=10,
):
"""Return a grid plane.
.. hint:: |brownian2D| |brownian2D.py|_
"""
ps = vtk.vtkPlaneSource()
ps.SetResolution(resx, resy)
ps.Update()
poly0 = ps.GetOutput()
t0 = vtk.vtkTransform()
t0.Scale(sx, sy, 1)
tf0 = vtk.vtkTransformPolyDataFilter()
tf0.SetInputData(poly0)
tf0.SetTransform(t0)
tf0.Update()
poly = tf0.GetOutput()
axis = np.array(normal) / np.linalg.norm(normal)
theta = np.arccos(axis[2])
phi = np.arctan2(axis[1], axis[0])
t = vtk.vtkTransform()
t.PostMultiply()
t.RotateY(theta * 57.3)
t.RotateZ(phi * 57.3)
tf = vtk.vtkTransformPolyDataFilter()
tf.SetInputData(poly)
tf.SetTransform(t)
tf.Update()
pd = tf.GetOutput()
actor = Actor(pd, c=c, bc=bc, alpha=alpha)
actor.GetProperty().SetRepresentationToWireframe()
actor.GetProperty().SetLineWidth(lw)
actor.SetPosition(pos)
settings.collectable_actors.append(actor)
return actor | def function[Grid, parameter[pos, normal, sx, sy, c, bc, lw, alpha, resx, resy]]:
constant[Return a grid plane.
.. hint:: |brownian2D| |brownian2D.py|_
]
variable[ps] assign[=] call[name[vtk].vtkPlaneSource, parameter[]]
call[name[ps].SetResolution, parameter[name[resx], name[resy]]]
call[name[ps].Update, parameter[]]
variable[poly0] assign[=] call[name[ps].GetOutput, parameter[]]
variable[t0] assign[=] call[name[vtk].vtkTransform, parameter[]]
call[name[t0].Scale, parameter[name[sx], name[sy], constant[1]]]
variable[tf0] assign[=] call[name[vtk].vtkTransformPolyDataFilter, parameter[]]
call[name[tf0].SetInputData, parameter[name[poly0]]]
call[name[tf0].SetTransform, parameter[name[t0]]]
call[name[tf0].Update, parameter[]]
variable[poly] assign[=] call[name[tf0].GetOutput, parameter[]]
variable[axis] assign[=] binary_operation[call[name[np].array, parameter[name[normal]]] / call[name[np].linalg.norm, parameter[name[normal]]]]
variable[theta] assign[=] call[name[np].arccos, parameter[call[name[axis]][constant[2]]]]
variable[phi] assign[=] call[name[np].arctan2, parameter[call[name[axis]][constant[1]], call[name[axis]][constant[0]]]]
variable[t] assign[=] call[name[vtk].vtkTransform, parameter[]]
call[name[t].PostMultiply, parameter[]]
call[name[t].RotateY, parameter[binary_operation[name[theta] * constant[57.3]]]]
call[name[t].RotateZ, parameter[binary_operation[name[phi] * constant[57.3]]]]
variable[tf] assign[=] call[name[vtk].vtkTransformPolyDataFilter, parameter[]]
call[name[tf].SetInputData, parameter[name[poly]]]
call[name[tf].SetTransform, parameter[name[t]]]
call[name[tf].Update, parameter[]]
variable[pd] assign[=] call[name[tf].GetOutput, parameter[]]
variable[actor] assign[=] call[name[Actor], parameter[name[pd]]]
call[call[name[actor].GetProperty, parameter[]].SetRepresentationToWireframe, parameter[]]
call[call[name[actor].GetProperty, parameter[]].SetLineWidth, parameter[name[lw]]]
call[name[actor].SetPosition, parameter[name[pos]]]
call[name[settings].collectable_actors.append, parameter[name[actor]]]
return[name[actor]] | keyword[def] identifier[Grid] (
identifier[pos] =( literal[int] , literal[int] , literal[int] ),
identifier[normal] =( literal[int] , literal[int] , literal[int] ),
identifier[sx] = literal[int] ,
identifier[sy] = literal[int] ,
identifier[c] = literal[string] ,
identifier[bc] = literal[string] ,
identifier[lw] = literal[int] ,
identifier[alpha] = literal[int] ,
identifier[resx] = literal[int] ,
identifier[resy] = literal[int] ,
):
literal[string]
identifier[ps] = identifier[vtk] . identifier[vtkPlaneSource] ()
identifier[ps] . identifier[SetResolution] ( identifier[resx] , identifier[resy] )
identifier[ps] . identifier[Update] ()
identifier[poly0] = identifier[ps] . identifier[GetOutput] ()
identifier[t0] = identifier[vtk] . identifier[vtkTransform] ()
identifier[t0] . identifier[Scale] ( identifier[sx] , identifier[sy] , literal[int] )
identifier[tf0] = identifier[vtk] . identifier[vtkTransformPolyDataFilter] ()
identifier[tf0] . identifier[SetInputData] ( identifier[poly0] )
identifier[tf0] . identifier[SetTransform] ( identifier[t0] )
identifier[tf0] . identifier[Update] ()
identifier[poly] = identifier[tf0] . identifier[GetOutput] ()
identifier[axis] = identifier[np] . identifier[array] ( identifier[normal] )/ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[normal] )
identifier[theta] = identifier[np] . identifier[arccos] ( identifier[axis] [ literal[int] ])
identifier[phi] = identifier[np] . identifier[arctan2] ( identifier[axis] [ literal[int] ], identifier[axis] [ literal[int] ])
identifier[t] = identifier[vtk] . identifier[vtkTransform] ()
identifier[t] . identifier[PostMultiply] ()
identifier[t] . identifier[RotateY] ( identifier[theta] * literal[int] )
identifier[t] . identifier[RotateZ] ( identifier[phi] * literal[int] )
identifier[tf] = identifier[vtk] . identifier[vtkTransformPolyDataFilter] ()
identifier[tf] . identifier[SetInputData] ( identifier[poly] )
identifier[tf] . identifier[SetTransform] ( identifier[t] )
identifier[tf] . identifier[Update] ()
identifier[pd] = identifier[tf] . identifier[GetOutput] ()
identifier[actor] = identifier[Actor] ( identifier[pd] , identifier[c] = identifier[c] , identifier[bc] = identifier[bc] , identifier[alpha] = identifier[alpha] )
identifier[actor] . identifier[GetProperty] (). identifier[SetRepresentationToWireframe] ()
identifier[actor] . identifier[GetProperty] (). identifier[SetLineWidth] ( identifier[lw] )
identifier[actor] . identifier[SetPosition] ( identifier[pos] )
identifier[settings] . identifier[collectable_actors] . identifier[append] ( identifier[actor] )
keyword[return] identifier[actor] | def Grid(pos=(0, 0, 0), normal=(0, 0, 1), sx=1, sy=1, c='g', bc='darkgreen', lw=1, alpha=1, resx=10, resy=10):
"""Return a grid plane.
.. hint:: |brownian2D| |brownian2D.py|_
"""
ps = vtk.vtkPlaneSource()
ps.SetResolution(resx, resy)
ps.Update()
poly0 = ps.GetOutput()
t0 = vtk.vtkTransform()
t0.Scale(sx, sy, 1)
tf0 = vtk.vtkTransformPolyDataFilter()
tf0.SetInputData(poly0)
tf0.SetTransform(t0)
tf0.Update()
poly = tf0.GetOutput()
axis = np.array(normal) / np.linalg.norm(normal)
theta = np.arccos(axis[2])
phi = np.arctan2(axis[1], axis[0])
t = vtk.vtkTransform()
t.PostMultiply()
t.RotateY(theta * 57.3)
t.RotateZ(phi * 57.3)
tf = vtk.vtkTransformPolyDataFilter()
tf.SetInputData(poly)
tf.SetTransform(t)
tf.Update()
pd = tf.GetOutput()
actor = Actor(pd, c=c, bc=bc, alpha=alpha)
actor.GetProperty().SetRepresentationToWireframe()
actor.GetProperty().SetLineWidth(lw)
actor.SetPosition(pos)
settings.collectable_actors.append(actor)
return actor |
def newest_bugs(amount):
"""Returns the newest bugs.
This method can be used to query the BTS for the n newest bugs.
Parameters
----------
amount : int
the number of desired bugs. E.g. if `amount` is 10 the method
will return the 10 latest bugs.
Returns
-------
bugs : list of int
the bugnumbers
"""
reply = _soap_client_call('newest_bugs', amount)
items_el = reply('soapenc:Array')
return [int(item_el) for item_el in items_el.children() or []] | def function[newest_bugs, parameter[amount]]:
constant[Returns the newest bugs.
This method can be used to query the BTS for the n newest bugs.
Parameters
----------
amount : int
the number of desired bugs. E.g. if `amount` is 10 the method
will return the 10 latest bugs.
Returns
-------
bugs : list of int
the bugnumbers
]
variable[reply] assign[=] call[name[_soap_client_call], parameter[constant[newest_bugs], name[amount]]]
variable[items_el] assign[=] call[name[reply], parameter[constant[soapenc:Array]]]
return[<ast.ListComp object at 0x7da1b1ffb010>] | keyword[def] identifier[newest_bugs] ( identifier[amount] ):
literal[string]
identifier[reply] = identifier[_soap_client_call] ( literal[string] , identifier[amount] )
identifier[items_el] = identifier[reply] ( literal[string] )
keyword[return] [ identifier[int] ( identifier[item_el] ) keyword[for] identifier[item_el] keyword[in] identifier[items_el] . identifier[children] () keyword[or] []] | def newest_bugs(amount):
"""Returns the newest bugs.
This method can be used to query the BTS for the n newest bugs.
Parameters
----------
amount : int
the number of desired bugs. E.g. if `amount` is 10 the method
will return the 10 latest bugs.
Returns
-------
bugs : list of int
the bugnumbers
"""
reply = _soap_client_call('newest_bugs', amount)
items_el = reply('soapenc:Array')
return [int(item_el) for item_el in items_el.children() or []] |
def createNode(self, cls, name, *args, **kw):
"""
Add a node of type cls to the graph if it does not already exist
by the given name
"""
m = self.findNode(name)
if m is None:
m = cls(name, *args, **kw)
self.addNode(m)
return m | def function[createNode, parameter[self, cls, name]]:
constant[
Add a node of type cls to the graph if it does not already exist
by the given name
]
variable[m] assign[=] call[name[self].findNode, parameter[name[name]]]
if compare[name[m] is constant[None]] begin[:]
variable[m] assign[=] call[name[cls], parameter[name[name], <ast.Starred object at 0x7da1b0e268c0>]]
call[name[self].addNode, parameter[name[m]]]
return[name[m]] | keyword[def] identifier[createNode] ( identifier[self] , identifier[cls] , identifier[name] ,* identifier[args] ,** identifier[kw] ):
literal[string]
identifier[m] = identifier[self] . identifier[findNode] ( identifier[name] )
keyword[if] identifier[m] keyword[is] keyword[None] :
identifier[m] = identifier[cls] ( identifier[name] ,* identifier[args] ,** identifier[kw] )
identifier[self] . identifier[addNode] ( identifier[m] )
keyword[return] identifier[m] | def createNode(self, cls, name, *args, **kw):
"""
Add a node of type cls to the graph if it does not already exist
by the given name
"""
m = self.findNode(name)
if m is None:
m = cls(name, *args, **kw)
self.addNode(m) # depends on [control=['if'], data=['m']]
return m |
def pop_back(self):
'''Remove the last element from the :class:`Sequence`.'''
backend = self.backend
return backend.execute(backend.structure(self).pop_back(),
self.value_pickler.loads) | def function[pop_back, parameter[self]]:
constant[Remove the last element from the :class:`Sequence`.]
variable[backend] assign[=] name[self].backend
return[call[name[backend].execute, parameter[call[call[name[backend].structure, parameter[name[self]]].pop_back, parameter[]], name[self].value_pickler.loads]]] | keyword[def] identifier[pop_back] ( identifier[self] ):
literal[string]
identifier[backend] = identifier[self] . identifier[backend]
keyword[return] identifier[backend] . identifier[execute] ( identifier[backend] . identifier[structure] ( identifier[self] ). identifier[pop_back] (),
identifier[self] . identifier[value_pickler] . identifier[loads] ) | def pop_back(self):
"""Remove the last element from the :class:`Sequence`."""
backend = self.backend
return backend.execute(backend.structure(self).pop_back(), self.value_pickler.loads) |
def chmod(hdfs_path, mode, user=None):
"""
Change file mode bits.
:type path: string
:param path: the path to the file or directory
:type mode: int
:param mode: the bitmask to set it to (e.g., 0777)
"""
host, port, path_ = path.split(hdfs_path, user)
fs = hdfs(host, port, user)
retval = fs.chmod(path_, mode)
fs.close()
return retval | def function[chmod, parameter[hdfs_path, mode, user]]:
constant[
Change file mode bits.
:type path: string
:param path: the path to the file or directory
:type mode: int
:param mode: the bitmask to set it to (e.g., 0777)
]
<ast.Tuple object at 0x7da1b13b4df0> assign[=] call[name[path].split, parameter[name[hdfs_path], name[user]]]
variable[fs] assign[=] call[name[hdfs], parameter[name[host], name[port], name[user]]]
variable[retval] assign[=] call[name[fs].chmod, parameter[name[path_], name[mode]]]
call[name[fs].close, parameter[]]
return[name[retval]] | keyword[def] identifier[chmod] ( identifier[hdfs_path] , identifier[mode] , identifier[user] = keyword[None] ):
literal[string]
identifier[host] , identifier[port] , identifier[path_] = identifier[path] . identifier[split] ( identifier[hdfs_path] , identifier[user] )
identifier[fs] = identifier[hdfs] ( identifier[host] , identifier[port] , identifier[user] )
identifier[retval] = identifier[fs] . identifier[chmod] ( identifier[path_] , identifier[mode] )
identifier[fs] . identifier[close] ()
keyword[return] identifier[retval] | def chmod(hdfs_path, mode, user=None):
"""
Change file mode bits.
:type path: string
:param path: the path to the file or directory
:type mode: int
:param mode: the bitmask to set it to (e.g., 0777)
"""
(host, port, path_) = path.split(hdfs_path, user)
fs = hdfs(host, port, user)
retval = fs.chmod(path_, mode)
fs.close()
return retval |
def _filename_global(self):
"""Create a .ini filename located in user home directory.
This .ini files stores the global spyder preferences.
"""
if self.subfolder is None:
config_file = osp.join(get_home_dir(), '.%s.ini' % self.name)
return config_file
else:
folder = get_conf_path()
# Save defaults in a "defaults" dir of .spyder2 to not pollute it
if 'defaults' in self.name:
folder = osp.join(folder, 'defaults')
if not osp.isdir(folder):
os.mkdir(folder)
config_file = osp.join(folder, '%s.ini' % self.name)
return config_file | def function[_filename_global, parameter[self]]:
constant[Create a .ini filename located in user home directory.
This .ini files stores the global spyder preferences.
]
if compare[name[self].subfolder is constant[None]] begin[:]
variable[config_file] assign[=] call[name[osp].join, parameter[call[name[get_home_dir], parameter[]], binary_operation[constant[.%s.ini] <ast.Mod object at 0x7da2590d6920> name[self].name]]]
return[name[config_file]] | keyword[def] identifier[_filename_global] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[subfolder] keyword[is] keyword[None] :
identifier[config_file] = identifier[osp] . identifier[join] ( identifier[get_home_dir] (), literal[string] % identifier[self] . identifier[name] )
keyword[return] identifier[config_file]
keyword[else] :
identifier[folder] = identifier[get_conf_path] ()
keyword[if] literal[string] keyword[in] identifier[self] . identifier[name] :
identifier[folder] = identifier[osp] . identifier[join] ( identifier[folder] , literal[string] )
keyword[if] keyword[not] identifier[osp] . identifier[isdir] ( identifier[folder] ):
identifier[os] . identifier[mkdir] ( identifier[folder] )
identifier[config_file] = identifier[osp] . identifier[join] ( identifier[folder] , literal[string] % identifier[self] . identifier[name] )
keyword[return] identifier[config_file] | def _filename_global(self):
"""Create a .ini filename located in user home directory.
This .ini files stores the global spyder preferences.
"""
if self.subfolder is None:
config_file = osp.join(get_home_dir(), '.%s.ini' % self.name)
return config_file # depends on [control=['if'], data=[]]
else:
folder = get_conf_path() # Save defaults in a "defaults" dir of .spyder2 to not pollute it
if 'defaults' in self.name:
folder = osp.join(folder, 'defaults')
if not osp.isdir(folder):
os.mkdir(folder) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
config_file = osp.join(folder, '%s.ini' % self.name)
return config_file |
def array(self, dimensions=None):
"""Convert dimension values to columnar array.
Args:
dimensions: List of dimensions to return
Returns:
Array of columns corresponding to each dimension
"""
if dimensions is None:
dims = [d for d in self.kdims + self.vdims]
else:
dims = [self.get_dimension(d, strict=True) for d in dimensions]
columns, types = [], []
for dim in dims:
column = self.dimension_values(dim)
columns.append(column)
types.append(column.dtype.kind)
if len(set(types)) > 1:
columns = [c.astype('object') for c in columns]
return np.column_stack(columns) | def function[array, parameter[self, dimensions]]:
constant[Convert dimension values to columnar array.
Args:
dimensions: List of dimensions to return
Returns:
Array of columns corresponding to each dimension
]
if compare[name[dimensions] is constant[None]] begin[:]
variable[dims] assign[=] <ast.ListComp object at 0x7da20c6a8eb0>
<ast.Tuple object at 0x7da20c6aa260> assign[=] tuple[[<ast.List object at 0x7da20c6aa3e0>, <ast.List object at 0x7da20c6a89d0>]]
for taget[name[dim]] in starred[name[dims]] begin[:]
variable[column] assign[=] call[name[self].dimension_values, parameter[name[dim]]]
call[name[columns].append, parameter[name[column]]]
call[name[types].append, parameter[name[column].dtype.kind]]
if compare[call[name[len], parameter[call[name[set], parameter[name[types]]]]] greater[>] constant[1]] begin[:]
variable[columns] assign[=] <ast.ListComp object at 0x7da20c6ab940>
return[call[name[np].column_stack, parameter[name[columns]]]] | keyword[def] identifier[array] ( identifier[self] , identifier[dimensions] = keyword[None] ):
literal[string]
keyword[if] identifier[dimensions] keyword[is] keyword[None] :
identifier[dims] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[self] . identifier[kdims] + identifier[self] . identifier[vdims] ]
keyword[else] :
identifier[dims] =[ identifier[self] . identifier[get_dimension] ( identifier[d] , identifier[strict] = keyword[True] ) keyword[for] identifier[d] keyword[in] identifier[dimensions] ]
identifier[columns] , identifier[types] =[],[]
keyword[for] identifier[dim] keyword[in] identifier[dims] :
identifier[column] = identifier[self] . identifier[dimension_values] ( identifier[dim] )
identifier[columns] . identifier[append] ( identifier[column] )
identifier[types] . identifier[append] ( identifier[column] . identifier[dtype] . identifier[kind] )
keyword[if] identifier[len] ( identifier[set] ( identifier[types] ))> literal[int] :
identifier[columns] =[ identifier[c] . identifier[astype] ( literal[string] ) keyword[for] identifier[c] keyword[in] identifier[columns] ]
keyword[return] identifier[np] . identifier[column_stack] ( identifier[columns] ) | def array(self, dimensions=None):
"""Convert dimension values to columnar array.
Args:
dimensions: List of dimensions to return
Returns:
Array of columns corresponding to each dimension
"""
if dimensions is None:
dims = [d for d in self.kdims + self.vdims] # depends on [control=['if'], data=[]]
else:
dims = [self.get_dimension(d, strict=True) for d in dimensions]
(columns, types) = ([], [])
for dim in dims:
column = self.dimension_values(dim)
columns.append(column)
types.append(column.dtype.kind) # depends on [control=['for'], data=['dim']]
if len(set(types)) > 1:
columns = [c.astype('object') for c in columns] # depends on [control=['if'], data=[]]
return np.column_stack(columns) |
def transform(self, X):
r'''
Computes the divergences from X to :attr:`features_`.
Parameters
----------
X : list of bag feature arrays or :class:`skl_groups.features.Features`
The bags to search "from".
Returns
-------
divs : array of shape ``[len(div_funcs), len(Ks), len(X), len(features_)] + ([2] if do_sym else [])``
The divergences from X to :attr:`features_`.
``divs[d, k, i, j]`` is the ``div_funcs[d]`` divergence
from ``X[i]`` to ``fetaures_[j]`` using a K of ``Ks[k]``.
If ``do_sym``, ``divs[d, k, i, j, 0]`` is
:math:`D_{d,k}( X_i \| \texttt{features_}_j)` and
``divs[d, k, i, j, 1]`` is :math:`D_{d,k}(\texttt{features_}_j \| X_i)`.
'''
X = as_features(X, stack=True, bare=True)
Y = self.features_
Ks = np.asarray(self.Ks)
if X.dim != Y.dim:
msg = "incompatible dimensions: fit with {}, transform with {}"
raise ValueError(msg.format(Y.dim, X.dim))
memory = self.memory
if isinstance(memory, string_types):
memory = Memory(cachedir=memory, verbose=0)
# ignore Y_indices to avoid slow pickling of them
# NOTE: if the indices are approximate, then might not get the same
# results!
est = memory.cache(_est_divs, ignore=['n_jobs', 'Y_indices', 'Y_rhos'])
output, self.rhos_ = est(
X, Y, self.indices_, getattr(self, 'rhos_', None),
self.div_funcs, Ks,
self.do_sym, self.clamp, self.version, self.min_dist,
self._flann_args(), self._n_jobs)
return output | def function[transform, parameter[self, X]]:
constant[
Computes the divergences from X to :attr:`features_`.
Parameters
----------
X : list of bag feature arrays or :class:`skl_groups.features.Features`
The bags to search "from".
Returns
-------
divs : array of shape ``[len(div_funcs), len(Ks), len(X), len(features_)] + ([2] if do_sym else [])``
The divergences from X to :attr:`features_`.
``divs[d, k, i, j]`` is the ``div_funcs[d]`` divergence
from ``X[i]`` to ``fetaures_[j]`` using a K of ``Ks[k]``.
If ``do_sym``, ``divs[d, k, i, j, 0]`` is
:math:`D_{d,k}( X_i \| \texttt{features_}_j)` and
``divs[d, k, i, j, 1]`` is :math:`D_{d,k}(\texttt{features_}_j \| X_i)`.
]
variable[X] assign[=] call[name[as_features], parameter[name[X]]]
variable[Y] assign[=] name[self].features_
variable[Ks] assign[=] call[name[np].asarray, parameter[name[self].Ks]]
if compare[name[X].dim not_equal[!=] name[Y].dim] begin[:]
variable[msg] assign[=] constant[incompatible dimensions: fit with {}, transform with {}]
<ast.Raise object at 0x7da2054a7490>
variable[memory] assign[=] name[self].memory
if call[name[isinstance], parameter[name[memory], name[string_types]]] begin[:]
variable[memory] assign[=] call[name[Memory], parameter[]]
variable[est] assign[=] call[name[memory].cache, parameter[name[_est_divs]]]
<ast.Tuple object at 0x7da2054a4ee0> assign[=] call[name[est], parameter[name[X], name[Y], name[self].indices_, call[name[getattr], parameter[name[self], constant[rhos_], constant[None]]], name[self].div_funcs, name[Ks], name[self].do_sym, name[self].clamp, name[self].version, name[self].min_dist, call[name[self]._flann_args, parameter[]], name[self]._n_jobs]]
return[name[output]] | keyword[def] identifier[transform] ( identifier[self] , identifier[X] ):
literal[string]
identifier[X] = identifier[as_features] ( identifier[X] , identifier[stack] = keyword[True] , identifier[bare] = keyword[True] )
identifier[Y] = identifier[self] . identifier[features_]
identifier[Ks] = identifier[np] . identifier[asarray] ( identifier[self] . identifier[Ks] )
keyword[if] identifier[X] . identifier[dim] != identifier[Y] . identifier[dim] :
identifier[msg] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[msg] . identifier[format] ( identifier[Y] . identifier[dim] , identifier[X] . identifier[dim] ))
identifier[memory] = identifier[self] . identifier[memory]
keyword[if] identifier[isinstance] ( identifier[memory] , identifier[string_types] ):
identifier[memory] = identifier[Memory] ( identifier[cachedir] = identifier[memory] , identifier[verbose] = literal[int] )
identifier[est] = identifier[memory] . identifier[cache] ( identifier[_est_divs] , identifier[ignore] =[ literal[string] , literal[string] , literal[string] ])
identifier[output] , identifier[self] . identifier[rhos_] = identifier[est] (
identifier[X] , identifier[Y] , identifier[self] . identifier[indices_] , identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ),
identifier[self] . identifier[div_funcs] , identifier[Ks] ,
identifier[self] . identifier[do_sym] , identifier[self] . identifier[clamp] , identifier[self] . identifier[version] , identifier[self] . identifier[min_dist] ,
identifier[self] . identifier[_flann_args] (), identifier[self] . identifier[_n_jobs] )
keyword[return] identifier[output] | def transform(self, X):
"""
Computes the divergences from X to :attr:`features_`.
Parameters
----------
X : list of bag feature arrays or :class:`skl_groups.features.Features`
The bags to search "from".
Returns
-------
divs : array of shape ``[len(div_funcs), len(Ks), len(X), len(features_)] + ([2] if do_sym else [])``
The divergences from X to :attr:`features_`.
``divs[d, k, i, j]`` is the ``div_funcs[d]`` divergence
from ``X[i]`` to ``fetaures_[j]`` using a K of ``Ks[k]``.
If ``do_sym``, ``divs[d, k, i, j, 0]`` is
:math:`D_{d,k}( X_i \\| \\texttt{features_}_j)` and
``divs[d, k, i, j, 1]`` is :math:`D_{d,k}(\\texttt{features_}_j \\| X_i)`.
"""
X = as_features(X, stack=True, bare=True)
Y = self.features_
Ks = np.asarray(self.Ks)
if X.dim != Y.dim:
msg = 'incompatible dimensions: fit with {}, transform with {}'
raise ValueError(msg.format(Y.dim, X.dim)) # depends on [control=['if'], data=[]]
memory = self.memory
if isinstance(memory, string_types):
memory = Memory(cachedir=memory, verbose=0) # depends on [control=['if'], data=[]]
# ignore Y_indices to avoid slow pickling of them
# NOTE: if the indices are approximate, then might not get the same
# results!
est = memory.cache(_est_divs, ignore=['n_jobs', 'Y_indices', 'Y_rhos'])
(output, self.rhos_) = est(X, Y, self.indices_, getattr(self, 'rhos_', None), self.div_funcs, Ks, self.do_sym, self.clamp, self.version, self.min_dist, self._flann_args(), self._n_jobs)
return output |
def file_add_tags(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /file-xxxx/addTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
"""
return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs) | def function[file_add_tags, parameter[object_id, input_params, always_retry]]:
constant[
Invokes the /file-xxxx/addTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
]
return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/addTags] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]] | keyword[def] identifier[file_add_tags] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] ) | def file_add_tags(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /file-xxxx/addTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
"""
return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs) |
def _GetApprovals(self,
approval_type,
offset,
count,
filter_func=None,
token=None):
"""Gets all approvals for a given user and approval type.
Args:
approval_type: The type of approvals to get.
offset: The starting index within the collection.
count: The number of items to return.
filter_func: A predicate function, returning True if a specific approval
should be included in the result and False otherwise.
token: The token identifying the user.
Returns:
A list of approvals of the given approval type.
"""
approvals_base_urn = aff4.ROOT_URN.Add("users").Add(
token.username).Add("approvals").Add(approval_type)
all_children = aff4.FACTORY.RecursiveMultiListChildren([approvals_base_urn])
approvals_urns = []
for subject, children in all_children:
# We only want to process leaf nodes.
if children:
continue
approvals_urns.append(subject)
approvals_urns.sort(key=lambda x: x.age, reverse=True)
approvals = list(
aff4.FACTORY.MultiOpen(
approvals_urns,
mode="r",
aff4_type=aff4_security.Approval,
age=aff4.ALL_TIMES,
token=token))
approvals_by_urn = {}
for approval in approvals:
approvals_by_urn[approval.symlink_urn or approval.urn] = approval
cur_offset = 0
sorted_approvals = []
for approval_urn in approvals_urns:
try:
approval = approvals_by_urn[approval_urn]
except KeyError:
continue
if filter_func is not None and not filter_func(approval):
continue
cur_offset += 1
if cur_offset <= offset:
continue
if count and len(sorted_approvals) >= count:
break
sorted_approvals.append(approval)
subjects_urns = [a.Get(a.Schema.SUBJECT) for a in approvals]
subjects_by_urn = {}
for subject in aff4.FACTORY.MultiOpen(subjects_urns, mode="r", token=token):
subjects_by_urn[subject.urn] = subject
return sorted_approvals, subjects_by_urn | def function[_GetApprovals, parameter[self, approval_type, offset, count, filter_func, token]]:
constant[Gets all approvals for a given user and approval type.
Args:
approval_type: The type of approvals to get.
offset: The starting index within the collection.
count: The number of items to return.
filter_func: A predicate function, returning True if a specific approval
should be included in the result and False otherwise.
token: The token identifying the user.
Returns:
A list of approvals of the given approval type.
]
variable[approvals_base_urn] assign[=] call[call[call[call[name[aff4].ROOT_URN.Add, parameter[constant[users]]].Add, parameter[name[token].username]].Add, parameter[constant[approvals]]].Add, parameter[name[approval_type]]]
variable[all_children] assign[=] call[name[aff4].FACTORY.RecursiveMultiListChildren, parameter[list[[<ast.Name object at 0x7da1b1b46d10>]]]]
variable[approvals_urns] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1b474f0>, <ast.Name object at 0x7da1b1b47b50>]]] in starred[name[all_children]] begin[:]
if name[children] begin[:]
continue
call[name[approvals_urns].append, parameter[name[subject]]]
call[name[approvals_urns].sort, parameter[]]
variable[approvals] assign[=] call[name[list], parameter[call[name[aff4].FACTORY.MultiOpen, parameter[name[approvals_urns]]]]]
variable[approvals_by_urn] assign[=] dictionary[[], []]
for taget[name[approval]] in starred[name[approvals]] begin[:]
call[name[approvals_by_urn]][<ast.BoolOp object at 0x7da1b1b474c0>] assign[=] name[approval]
variable[cur_offset] assign[=] constant[0]
variable[sorted_approvals] assign[=] list[[]]
for taget[name[approval_urn]] in starred[name[approvals_urns]] begin[:]
<ast.Try object at 0x7da1b1b47ee0>
if <ast.BoolOp object at 0x7da1b1b455d0> begin[:]
continue
<ast.AugAssign object at 0x7da1b1b47190>
if compare[name[cur_offset] less_or_equal[<=] name[offset]] begin[:]
continue
if <ast.BoolOp object at 0x7da1b1b44850> begin[:]
break
call[name[sorted_approvals].append, parameter[name[approval]]]
variable[subjects_urns] assign[=] <ast.ListComp object at 0x7da1b1b456f0>
variable[subjects_by_urn] assign[=] dictionary[[], []]
for taget[name[subject]] in starred[call[name[aff4].FACTORY.MultiOpen, parameter[name[subjects_urns]]]] begin[:]
call[name[subjects_by_urn]][name[subject].urn] assign[=] name[subject]
return[tuple[[<ast.Name object at 0x7da1b1b44a90>, <ast.Name object at 0x7da1b1b47850>]]] | keyword[def] identifier[_GetApprovals] ( identifier[self] ,
identifier[approval_type] ,
identifier[offset] ,
identifier[count] ,
identifier[filter_func] = keyword[None] ,
identifier[token] = keyword[None] ):
literal[string]
identifier[approvals_base_urn] = identifier[aff4] . identifier[ROOT_URN] . identifier[Add] ( literal[string] ). identifier[Add] (
identifier[token] . identifier[username] ). identifier[Add] ( literal[string] ). identifier[Add] ( identifier[approval_type] )
identifier[all_children] = identifier[aff4] . identifier[FACTORY] . identifier[RecursiveMultiListChildren] ([ identifier[approvals_base_urn] ])
identifier[approvals_urns] =[]
keyword[for] identifier[subject] , identifier[children] keyword[in] identifier[all_children] :
keyword[if] identifier[children] :
keyword[continue]
identifier[approvals_urns] . identifier[append] ( identifier[subject] )
identifier[approvals_urns] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[age] , identifier[reverse] = keyword[True] )
identifier[approvals] = identifier[list] (
identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] (
identifier[approvals_urns] ,
identifier[mode] = literal[string] ,
identifier[aff4_type] = identifier[aff4_security] . identifier[Approval] ,
identifier[age] = identifier[aff4] . identifier[ALL_TIMES] ,
identifier[token] = identifier[token] ))
identifier[approvals_by_urn] ={}
keyword[for] identifier[approval] keyword[in] identifier[approvals] :
identifier[approvals_by_urn] [ identifier[approval] . identifier[symlink_urn] keyword[or] identifier[approval] . identifier[urn] ]= identifier[approval]
identifier[cur_offset] = literal[int]
identifier[sorted_approvals] =[]
keyword[for] identifier[approval_urn] keyword[in] identifier[approvals_urns] :
keyword[try] :
identifier[approval] = identifier[approvals_by_urn] [ identifier[approval_urn] ]
keyword[except] identifier[KeyError] :
keyword[continue]
keyword[if] identifier[filter_func] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[filter_func] ( identifier[approval] ):
keyword[continue]
identifier[cur_offset] += literal[int]
keyword[if] identifier[cur_offset] <= identifier[offset] :
keyword[continue]
keyword[if] identifier[count] keyword[and] identifier[len] ( identifier[sorted_approvals] )>= identifier[count] :
keyword[break]
identifier[sorted_approvals] . identifier[append] ( identifier[approval] )
identifier[subjects_urns] =[ identifier[a] . identifier[Get] ( identifier[a] . identifier[Schema] . identifier[SUBJECT] ) keyword[for] identifier[a] keyword[in] identifier[approvals] ]
identifier[subjects_by_urn] ={}
keyword[for] identifier[subject] keyword[in] identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] ( identifier[subjects_urns] , identifier[mode] = literal[string] , identifier[token] = identifier[token] ):
identifier[subjects_by_urn] [ identifier[subject] . identifier[urn] ]= identifier[subject]
keyword[return] identifier[sorted_approvals] , identifier[subjects_by_urn] | def _GetApprovals(self, approval_type, offset, count, filter_func=None, token=None):
"""Gets all approvals for a given user and approval type.
Args:
approval_type: The type of approvals to get.
offset: The starting index within the collection.
count: The number of items to return.
filter_func: A predicate function, returning True if a specific approval
should be included in the result and False otherwise.
token: The token identifying the user.
Returns:
A list of approvals of the given approval type.
"""
approvals_base_urn = aff4.ROOT_URN.Add('users').Add(token.username).Add('approvals').Add(approval_type)
all_children = aff4.FACTORY.RecursiveMultiListChildren([approvals_base_urn])
approvals_urns = []
for (subject, children) in all_children:
# We only want to process leaf nodes.
if children:
continue # depends on [control=['if'], data=[]]
approvals_urns.append(subject) # depends on [control=['for'], data=[]]
approvals_urns.sort(key=lambda x: x.age, reverse=True)
approvals = list(aff4.FACTORY.MultiOpen(approvals_urns, mode='r', aff4_type=aff4_security.Approval, age=aff4.ALL_TIMES, token=token))
approvals_by_urn = {}
for approval in approvals:
approvals_by_urn[approval.symlink_urn or approval.urn] = approval # depends on [control=['for'], data=['approval']]
cur_offset = 0
sorted_approvals = []
for approval_urn in approvals_urns:
try:
approval = approvals_by_urn[approval_urn] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]]
if filter_func is not None and (not filter_func(approval)):
continue # depends on [control=['if'], data=[]]
cur_offset += 1
if cur_offset <= offset:
continue # depends on [control=['if'], data=[]]
if count and len(sorted_approvals) >= count:
break # depends on [control=['if'], data=[]]
sorted_approvals.append(approval) # depends on [control=['for'], data=['approval_urn']]
subjects_urns = [a.Get(a.Schema.SUBJECT) for a in approvals]
subjects_by_urn = {}
for subject in aff4.FACTORY.MultiOpen(subjects_urns, mode='r', token=token):
subjects_by_urn[subject.urn] = subject # depends on [control=['for'], data=['subject']]
return (sorted_approvals, subjects_by_urn) |
def is_ip_filter(ip, options=None):
'''
Returns a bool telling if the passed IP is a valid IPv4 or IPv6 address.
'''
return is_ipv4_filter(ip, options=options) or is_ipv6_filter(ip, options=options) | def function[is_ip_filter, parameter[ip, options]]:
constant[
Returns a bool telling if the passed IP is a valid IPv4 or IPv6 address.
]
return[<ast.BoolOp object at 0x7da1b1f7b430>] | keyword[def] identifier[is_ip_filter] ( identifier[ip] , identifier[options] = keyword[None] ):
literal[string]
keyword[return] identifier[is_ipv4_filter] ( identifier[ip] , identifier[options] = identifier[options] ) keyword[or] identifier[is_ipv6_filter] ( identifier[ip] , identifier[options] = identifier[options] ) | def is_ip_filter(ip, options=None):
"""
Returns a bool telling if the passed IP is a valid IPv4 or IPv6 address.
"""
return is_ipv4_filter(ip, options=options) or is_ipv6_filter(ip, options=options) |
def make_nn_descent(dist, dist_args):
"""Create a numba accelerated version of nearest neighbor descent
specialised for the given distance metric and metric arguments. Numba
doesn't support higher order functions directly, but we can instead JIT
compile the version of NN-descent for any given metric.
Parameters
----------
dist: function
A numba JITd distance function which, given two arrays computes a
dissimilarity between them.
dist_args: tuple
Any extra arguments that need to be passed to the distance function
beyond the two arrays to be compared.
Returns
-------
A numba JITd function for nearest neighbor descent computation that is
specialised to the given metric.
"""
@numba.njit(parallel=True)
def nn_descent(
data,
n_neighbors,
rng_state,
max_candidates=50,
n_iters=10,
delta=0.001,
rho=0.5,
rp_tree_init=True,
leaf_array=None,
verbose=False,
):
n_vertices = data.shape[0]
current_graph = make_heap(data.shape[0], n_neighbors)
for i in range(data.shape[0]):
indices = rejection_sample(n_neighbors, data.shape[0], rng_state)
for j in range(indices.shape[0]):
d = dist(data[i], data[indices[j]], *dist_args)
heap_push(current_graph, i, d, indices[j], 1)
heap_push(current_graph, indices[j], d, i, 1)
if rp_tree_init:
for n in range(leaf_array.shape[0]):
for i in range(leaf_array.shape[1]):
if leaf_array[n, i] < 0:
break
for j in range(i + 1, leaf_array.shape[1]):
if leaf_array[n, j] < 0:
break
d = dist(
data[leaf_array[n, i]], data[leaf_array[n, j]], *dist_args
)
heap_push(
current_graph, leaf_array[n, i], d, leaf_array[n, j], 1
)
heap_push(
current_graph, leaf_array[n, j], d, leaf_array[n, i], 1
)
for n in range(n_iters):
if verbose:
print("\t", n, " / ", n_iters)
candidate_neighbors = build_candidates(
current_graph, n_vertices, n_neighbors, max_candidates, rng_state
)
c = 0
for i in range(n_vertices):
for j in range(max_candidates):
p = int(candidate_neighbors[0, i, j])
if p < 0 or tau_rand(rng_state) < rho:
continue
for k in range(max_candidates):
q = int(candidate_neighbors[0, i, k])
if (
q < 0
or not candidate_neighbors[2, i, j]
and not candidate_neighbors[2, i, k]
):
continue
d = dist(data[p], data[q], *dist_args)
c += heap_push(current_graph, p, d, q, 1)
c += heap_push(current_graph, q, d, p, 1)
if c <= delta * n_neighbors * data.shape[0]:
break
return deheap_sort(current_graph)
return nn_descent | def function[make_nn_descent, parameter[dist, dist_args]]:
constant[Create a numba accelerated version of nearest neighbor descent
specialised for the given distance metric and metric arguments. Numba
doesn't support higher order functions directly, but we can instead JIT
compile the version of NN-descent for any given metric.
Parameters
----------
dist: function
A numba JITd distance function which, given two arrays computes a
dissimilarity between them.
dist_args: tuple
Any extra arguments that need to be passed to the distance function
beyond the two arrays to be compared.
Returns
-------
A numba JITd function for nearest neighbor descent computation that is
specialised to the given metric.
]
def function[nn_descent, parameter[data, n_neighbors, rng_state, max_candidates, n_iters, delta, rho, rp_tree_init, leaf_array, verbose]]:
variable[n_vertices] assign[=] call[name[data].shape][constant[0]]
variable[current_graph] assign[=] call[name[make_heap], parameter[call[name[data].shape][constant[0]], name[n_neighbors]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[data].shape][constant[0]]]]] begin[:]
variable[indices] assign[=] call[name[rejection_sample], parameter[name[n_neighbors], call[name[data].shape][constant[0]], name[rng_state]]]
for taget[name[j]] in starred[call[name[range], parameter[call[name[indices].shape][constant[0]]]]] begin[:]
variable[d] assign[=] call[name[dist], parameter[call[name[data]][name[i]], call[name[data]][call[name[indices]][name[j]]], <ast.Starred object at 0x7da20cabf580>]]
call[name[heap_push], parameter[name[current_graph], name[i], name[d], call[name[indices]][name[j]], constant[1]]]
call[name[heap_push], parameter[name[current_graph], call[name[indices]][name[j]], name[d], name[i], constant[1]]]
if name[rp_tree_init] begin[:]
for taget[name[n]] in starred[call[name[range], parameter[call[name[leaf_array].shape][constant[0]]]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[leaf_array].shape][constant[1]]]]] begin[:]
if compare[call[name[leaf_array]][tuple[[<ast.Name object at 0x7da20cabfeb0>, <ast.Name object at 0x7da20cabe590>]]] less[<] constant[0]] begin[:]
break
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], call[name[leaf_array].shape][constant[1]]]]] begin[:]
if compare[call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f810e80>, <ast.Name object at 0x7da18f8113f0>]]] less[<] constant[0]] begin[:]
break
variable[d] assign[=] call[name[dist], parameter[call[name[data]][call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f8133a0>, <ast.Name object at 0x7da18f813ee0>]]]], call[name[data]][call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f810f10>, <ast.Name object at 0x7da18f812e30>]]]], <ast.Starred object at 0x7da18f810760>]]
call[name[heap_push], parameter[name[current_graph], call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f8100a0>, <ast.Name object at 0x7da18f813970>]]], name[d], call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f811f60>, <ast.Name object at 0x7da18f811f90>]]], constant[1]]]
call[name[heap_push], parameter[name[current_graph], call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f812aa0>, <ast.Name object at 0x7da18f812200>]]], name[d], call[name[leaf_array]][tuple[[<ast.Name object at 0x7da18f811b40>, <ast.Name object at 0x7da18f813160>]]], constant[1]]]
for taget[name[n]] in starred[call[name[range], parameter[name[n_iters]]]] begin[:]
if name[verbose] begin[:]
call[name[print], parameter[constant[ ], name[n], constant[ / ], name[n_iters]]]
variable[candidate_neighbors] assign[=] call[name[build_candidates], parameter[name[current_graph], name[n_vertices], name[n_neighbors], name[max_candidates], name[rng_state]]]
variable[c] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[name[n_vertices]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[max_candidates]]]] begin[:]
variable[p] assign[=] call[name[int], parameter[call[name[candidate_neighbors]][tuple[[<ast.Constant object at 0x7da18f810a30>, <ast.Name object at 0x7da18f810df0>, <ast.Name object at 0x7da18f813d90>]]]]]
if <ast.BoolOp object at 0x7da18f813a00> begin[:]
continue
for taget[name[k]] in starred[call[name[range], parameter[name[max_candidates]]]] begin[:]
variable[q] assign[=] call[name[int], parameter[call[name[candidate_neighbors]][tuple[[<ast.Constant object at 0x7da18f811660>, <ast.Name object at 0x7da18f811000>, <ast.Name object at 0x7da2046201f0>]]]]]
if <ast.BoolOp object at 0x7da204621150> begin[:]
continue
variable[d] assign[=] call[name[dist], parameter[call[name[data]][name[p]], call[name[data]][name[q]], <ast.Starred object at 0x7da2046202e0>]]
<ast.AugAssign object at 0x7da204623c40>
<ast.AugAssign object at 0x7da204621c60>
if compare[name[c] less_or_equal[<=] binary_operation[binary_operation[name[delta] * name[n_neighbors]] * call[name[data].shape][constant[0]]]] begin[:]
break
return[call[name[deheap_sort], parameter[name[current_graph]]]]
return[name[nn_descent]] | keyword[def] identifier[make_nn_descent] ( identifier[dist] , identifier[dist_args] ):
literal[string]
@ identifier[numba] . identifier[njit] ( identifier[parallel] = keyword[True] )
keyword[def] identifier[nn_descent] (
identifier[data] ,
identifier[n_neighbors] ,
identifier[rng_state] ,
identifier[max_candidates] = literal[int] ,
identifier[n_iters] = literal[int] ,
identifier[delta] = literal[int] ,
identifier[rho] = literal[int] ,
identifier[rp_tree_init] = keyword[True] ,
identifier[leaf_array] = keyword[None] ,
identifier[verbose] = keyword[False] ,
):
identifier[n_vertices] = identifier[data] . identifier[shape] [ literal[int] ]
identifier[current_graph] = identifier[make_heap] ( identifier[data] . identifier[shape] [ literal[int] ], identifier[n_neighbors] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]):
identifier[indices] = identifier[rejection_sample] ( identifier[n_neighbors] , identifier[data] . identifier[shape] [ literal[int] ], identifier[rng_state] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[indices] . identifier[shape] [ literal[int] ]):
identifier[d] = identifier[dist] ( identifier[data] [ identifier[i] ], identifier[data] [ identifier[indices] [ identifier[j] ]],* identifier[dist_args] )
identifier[heap_push] ( identifier[current_graph] , identifier[i] , identifier[d] , identifier[indices] [ identifier[j] ], literal[int] )
identifier[heap_push] ( identifier[current_graph] , identifier[indices] [ identifier[j] ], identifier[d] , identifier[i] , literal[int] )
keyword[if] identifier[rp_tree_init] :
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[leaf_array] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[leaf_array] . identifier[shape] [ literal[int] ]):
keyword[if] identifier[leaf_array] [ identifier[n] , identifier[i] ]< literal[int] :
keyword[break]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[leaf_array] . identifier[shape] [ literal[int] ]):
keyword[if] identifier[leaf_array] [ identifier[n] , identifier[j] ]< literal[int] :
keyword[break]
identifier[d] = identifier[dist] (
identifier[data] [ identifier[leaf_array] [ identifier[n] , identifier[i] ]], identifier[data] [ identifier[leaf_array] [ identifier[n] , identifier[j] ]],* identifier[dist_args]
)
identifier[heap_push] (
identifier[current_graph] , identifier[leaf_array] [ identifier[n] , identifier[i] ], identifier[d] , identifier[leaf_array] [ identifier[n] , identifier[j] ], literal[int]
)
identifier[heap_push] (
identifier[current_graph] , identifier[leaf_array] [ identifier[n] , identifier[j] ], identifier[d] , identifier[leaf_array] [ identifier[n] , identifier[i] ], literal[int]
)
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[n_iters] ):
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] , identifier[n] , literal[string] , identifier[n_iters] )
identifier[candidate_neighbors] = identifier[build_candidates] (
identifier[current_graph] , identifier[n_vertices] , identifier[n_neighbors] , identifier[max_candidates] , identifier[rng_state]
)
identifier[c] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_vertices] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[max_candidates] ):
identifier[p] = identifier[int] ( identifier[candidate_neighbors] [ literal[int] , identifier[i] , identifier[j] ])
keyword[if] identifier[p] < literal[int] keyword[or] identifier[tau_rand] ( identifier[rng_state] )< identifier[rho] :
keyword[continue]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[max_candidates] ):
identifier[q] = identifier[int] ( identifier[candidate_neighbors] [ literal[int] , identifier[i] , identifier[k] ])
keyword[if] (
identifier[q] < literal[int]
keyword[or] keyword[not] identifier[candidate_neighbors] [ literal[int] , identifier[i] , identifier[j] ]
keyword[and] keyword[not] identifier[candidate_neighbors] [ literal[int] , identifier[i] , identifier[k] ]
):
keyword[continue]
identifier[d] = identifier[dist] ( identifier[data] [ identifier[p] ], identifier[data] [ identifier[q] ],* identifier[dist_args] )
identifier[c] += identifier[heap_push] ( identifier[current_graph] , identifier[p] , identifier[d] , identifier[q] , literal[int] )
identifier[c] += identifier[heap_push] ( identifier[current_graph] , identifier[q] , identifier[d] , identifier[p] , literal[int] )
keyword[if] identifier[c] <= identifier[delta] * identifier[n_neighbors] * identifier[data] . identifier[shape] [ literal[int] ]:
keyword[break]
keyword[return] identifier[deheap_sort] ( identifier[current_graph] )
keyword[return] identifier[nn_descent] | def make_nn_descent(dist, dist_args):
"""Create a numba accelerated version of nearest neighbor descent
specialised for the given distance metric and metric arguments. Numba
doesn't support higher order functions directly, but we can instead JIT
compile the version of NN-descent for any given metric.
Parameters
----------
dist: function
A numba JITd distance function which, given two arrays computes a
dissimilarity between them.
dist_args: tuple
Any extra arguments that need to be passed to the distance function
beyond the two arrays to be compared.
Returns
-------
A numba JITd function for nearest neighbor descent computation that is
specialised to the given metric.
"""
@numba.njit(parallel=True)
def nn_descent(data, n_neighbors, rng_state, max_candidates=50, n_iters=10, delta=0.001, rho=0.5, rp_tree_init=True, leaf_array=None, verbose=False):
n_vertices = data.shape[0]
current_graph = make_heap(data.shape[0], n_neighbors)
for i in range(data.shape[0]):
indices = rejection_sample(n_neighbors, data.shape[0], rng_state)
for j in range(indices.shape[0]):
d = dist(data[i], data[indices[j]], *dist_args)
heap_push(current_graph, i, d, indices[j], 1)
heap_push(current_graph, indices[j], d, i, 1) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
if rp_tree_init:
for n in range(leaf_array.shape[0]):
for i in range(leaf_array.shape[1]):
if leaf_array[n, i] < 0:
break # depends on [control=['if'], data=[]]
for j in range(i + 1, leaf_array.shape[1]):
if leaf_array[n, j] < 0:
break # depends on [control=['if'], data=[]]
d = dist(data[leaf_array[n, i]], data[leaf_array[n, j]], *dist_args)
heap_push(current_graph, leaf_array[n, i], d, leaf_array[n, j], 1)
heap_push(current_graph, leaf_array[n, j], d, leaf_array[n, i], 1) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=[]]
for n in range(n_iters):
if verbose:
print('\t', n, ' / ', n_iters) # depends on [control=['if'], data=[]]
candidate_neighbors = build_candidates(current_graph, n_vertices, n_neighbors, max_candidates, rng_state)
c = 0
for i in range(n_vertices):
for j in range(max_candidates):
p = int(candidate_neighbors[0, i, j])
if p < 0 or tau_rand(rng_state) < rho:
continue # depends on [control=['if'], data=[]]
for k in range(max_candidates):
q = int(candidate_neighbors[0, i, k])
if q < 0 or (not candidate_neighbors[2, i, j] and (not candidate_neighbors[2, i, k])):
continue # depends on [control=['if'], data=[]]
d = dist(data[p], data[q], *dist_args)
c += heap_push(current_graph, p, d, q, 1)
c += heap_push(current_graph, q, d, p, 1) # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
if c <= delta * n_neighbors * data.shape[0]:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
return deheap_sort(current_graph)
return nn_descent |
def iptag_set(self, iptag, addr, port, x, y):
"""Set the value of an IPTag.
Forward SDP packets with the specified IP tag sent by a SpiNNaker
application to a given external IP address.
A :ref:`tutorial example <scp-and-sdp-tutorial>` of the use of IP Tags
to send and receive SDP packets to and from applications is also
available.
Parameters
----------
iptag : int
Index of the IPTag to set
addr : string
IP address or hostname that the IPTag should point at.
port : int
UDP port that the IPTag should direct packets to.
"""
# Format the IP address
ip_addr = struct.pack('!4B',
*map(int, socket.gethostbyname(addr).split('.')))
self._send_scp(x, y, 0, SCPCommands.iptag,
int(consts.IPTagCommands.set) << 16 | iptag,
port, struct.unpack('<I', ip_addr)[0]) | def function[iptag_set, parameter[self, iptag, addr, port, x, y]]:
constant[Set the value of an IPTag.
Forward SDP packets with the specified IP tag sent by a SpiNNaker
application to a given external IP address.
A :ref:`tutorial example <scp-and-sdp-tutorial>` of the use of IP Tags
to send and receive SDP packets to and from applications is also
available.
Parameters
----------
iptag : int
Index of the IPTag to set
addr : string
IP address or hostname that the IPTag should point at.
port : int
UDP port that the IPTag should direct packets to.
]
variable[ip_addr] assign[=] call[name[struct].pack, parameter[constant[!4B], <ast.Starred object at 0x7da1b196aef0>]]
call[name[self]._send_scp, parameter[name[x], name[y], constant[0], name[SCPCommands].iptag, binary_operation[binary_operation[call[name[int], parameter[name[consts].IPTagCommands.set]] <ast.LShift object at 0x7da2590d69e0> constant[16]] <ast.BitOr object at 0x7da2590d6aa0> name[iptag]], name[port], call[call[name[struct].unpack, parameter[constant[<I], name[ip_addr]]]][constant[0]]]] | keyword[def] identifier[iptag_set] ( identifier[self] , identifier[iptag] , identifier[addr] , identifier[port] , identifier[x] , identifier[y] ):
literal[string]
identifier[ip_addr] = identifier[struct] . identifier[pack] ( literal[string] ,
* identifier[map] ( identifier[int] , identifier[socket] . identifier[gethostbyname] ( identifier[addr] ). identifier[split] ( literal[string] )))
identifier[self] . identifier[_send_scp] ( identifier[x] , identifier[y] , literal[int] , identifier[SCPCommands] . identifier[iptag] ,
identifier[int] ( identifier[consts] . identifier[IPTagCommands] . identifier[set] )<< literal[int] | identifier[iptag] ,
identifier[port] , identifier[struct] . identifier[unpack] ( literal[string] , identifier[ip_addr] )[ literal[int] ]) | def iptag_set(self, iptag, addr, port, x, y):
"""Set the value of an IPTag.
Forward SDP packets with the specified IP tag sent by a SpiNNaker
application to a given external IP address.
A :ref:`tutorial example <scp-and-sdp-tutorial>` of the use of IP Tags
to send and receive SDP packets to and from applications is also
available.
Parameters
----------
iptag : int
Index of the IPTag to set
addr : string
IP address or hostname that the IPTag should point at.
port : int
UDP port that the IPTag should direct packets to.
"""
# Format the IP address
ip_addr = struct.pack('!4B', *map(int, socket.gethostbyname(addr).split('.')))
self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.set) << 16 | iptag, port, struct.unpack('<I', ip_addr)[0]) |
def retry(n, errors, wait=0.0, logger_name=None):
"""This is a decorator that retries a function.
Tries `n` times and catches a given tuple of `errors`.
If the `n` retries are not enough, the error is reraised.
If desired `waits` some seconds.
Optionally takes a 'logger_name' of a given logger to print the caught error.
"""
def wrapper(func):
@functools.wraps(func)
def new_func(*args, **kwargs):
retries = 0
while True:
try:
result = func(*args, **kwargs)
if retries and logger_name:
logger = logging.getLogger(logger_name)
logger.debug('Retry of `%s` successful' % func.__name__)
return result
except errors:
if retries >= n:
if logger_name:
logger = logging.getLogger(logger_name)
logger.exception('I could not execute `%s` with args %s and kwargs %s, '
'starting next try. ' % (func.__name__,
str(args),
str(kwargs)))
raise
elif logger_name:
logger = logging.getLogger(logger_name)
logger.debug('I could not execute `%s` with args %s and kwargs %s, '
'starting next try. ' % (func.__name__,
str(args),
str(kwargs)))
retries += 1
if wait:
time.sleep(wait)
return new_func
return wrapper | def function[retry, parameter[n, errors, wait, logger_name]]:
constant[This is a decorator that retries a function.
Tries `n` times and catches a given tuple of `errors`.
If the `n` retries are not enough, the error is reraised.
If desired `waits` some seconds.
Optionally takes a 'logger_name' of a given logger to print the caught error.
]
def function[wrapper, parameter[func]]:
def function[new_func, parameter[]]:
variable[retries] assign[=] constant[0]
while constant[True] begin[:]
<ast.Try object at 0x7da1b01e27d0>
return[name[new_func]]
return[name[wrapper]] | keyword[def] identifier[retry] ( identifier[n] , identifier[errors] , identifier[wait] = literal[int] , identifier[logger_name] = keyword[None] ):
literal[string]
keyword[def] identifier[wrapper] ( identifier[func] ):
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[new_func] (* identifier[args] ,** identifier[kwargs] ):
identifier[retries] = literal[int]
keyword[while] keyword[True] :
keyword[try] :
identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[retries] keyword[and] identifier[logger_name] :
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[logger_name] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[func] . identifier[__name__] )
keyword[return] identifier[result]
keyword[except] identifier[errors] :
keyword[if] identifier[retries] >= identifier[n] :
keyword[if] identifier[logger_name] :
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[logger_name] )
identifier[logger] . identifier[exception] ( literal[string]
literal[string] %( identifier[func] . identifier[__name__] ,
identifier[str] ( identifier[args] ),
identifier[str] ( identifier[kwargs] )))
keyword[raise]
keyword[elif] identifier[logger_name] :
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[logger_name] )
identifier[logger] . identifier[debug] ( literal[string]
literal[string] %( identifier[func] . identifier[__name__] ,
identifier[str] ( identifier[args] ),
identifier[str] ( identifier[kwargs] )))
identifier[retries] += literal[int]
keyword[if] identifier[wait] :
identifier[time] . identifier[sleep] ( identifier[wait] )
keyword[return] identifier[new_func]
keyword[return] identifier[wrapper] | def retry(n, errors, wait=0.0, logger_name=None):
"""This is a decorator that retries a function.
Tries `n` times and catches a given tuple of `errors`.
If the `n` retries are not enough, the error is reraised.
If desired `waits` some seconds.
Optionally takes a 'logger_name' of a given logger to print the caught error.
"""
def wrapper(func):
@functools.wraps(func)
def new_func(*args, **kwargs):
retries = 0
while True:
try:
result = func(*args, **kwargs)
if retries and logger_name:
logger = logging.getLogger(logger_name)
logger.debug('Retry of `%s` successful' % func.__name__) # depends on [control=['if'], data=[]]
return result # depends on [control=['try'], data=[]]
except errors:
if retries >= n:
if logger_name:
logger = logging.getLogger(logger_name)
logger.exception('I could not execute `%s` with args %s and kwargs %s, starting next try. ' % (func.__name__, str(args), str(kwargs))) # depends on [control=['if'], data=[]]
raise # depends on [control=['if'], data=[]]
elif logger_name:
logger = logging.getLogger(logger_name)
logger.debug('I could not execute `%s` with args %s and kwargs %s, starting next try. ' % (func.__name__, str(args), str(kwargs))) # depends on [control=['if'], data=[]]
retries += 1
if wait:
time.sleep(wait) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
return new_func
return wrapper |
def evaluate_marker(text, extra=None):
"""
Evaluate a PEP 508 environment marker.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
This implementation uses the 'pyparsing' module.
"""
try:
marker = packaging.markers.Marker(text)
return marker.evaluate()
except packaging.markers.InvalidMarker as e:
raise SyntaxError(e) | def function[evaluate_marker, parameter[text, extra]]:
constant[
Evaluate a PEP 508 environment marker.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
This implementation uses the 'pyparsing' module.
]
<ast.Try object at 0x7da2054a4a00> | keyword[def] identifier[evaluate_marker] ( identifier[text] , identifier[extra] = keyword[None] ):
literal[string]
keyword[try] :
identifier[marker] = identifier[packaging] . identifier[markers] . identifier[Marker] ( identifier[text] )
keyword[return] identifier[marker] . identifier[evaluate] ()
keyword[except] identifier[packaging] . identifier[markers] . identifier[InvalidMarker] keyword[as] identifier[e] :
keyword[raise] identifier[SyntaxError] ( identifier[e] ) | def evaluate_marker(text, extra=None):
"""
Evaluate a PEP 508 environment marker.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
This implementation uses the 'pyparsing' module.
"""
try:
marker = packaging.markers.Marker(text)
return marker.evaluate() # depends on [control=['try'], data=[]]
except packaging.markers.InvalidMarker as e:
raise SyntaxError(e) # depends on [control=['except'], data=['e']] |
def brew_recipe(recipe_name):
"""Returns a pipeline string from a recipe name.
Parameters
----------
recipe_name : str
Name of the recipe. Must match the name attribute in one of the classes
defined in :mod:`flowcraft.generator.recipes`
Returns
-------
str
Pipeline string ready for parsing and processing by flowcraft engine
"""
# This will iterate over all modules included in the recipes subpackage
# It will return the import class and the module name, algon with the
# correct prefix
prefix = "{}.".format(recipes.__name__)
for importer, modname, _ in pkgutil.iter_modules(recipes.__path__, prefix):
# Import the current module
_module = importer.find_module(modname).load_module(modname)
# Fetch all available classes in module
_recipe_classes = [cls for cls in _module.__dict__.values() if
isinstance(cls, type)]
# Iterate over each Recipe class, and check for a match with the
# provided recipe name.
for cls in _recipe_classes:
# Create instance of class to allow fetching the name attribute
recipe_cls = cls()
if getattr(recipe_cls, "name", None) == recipe_name:
return recipe_cls.brew()
logger.error(
colored_print("Recipe name '{}' does not exist.".format(recipe_name))
)
sys.exit(1) | def function[brew_recipe, parameter[recipe_name]]:
constant[Returns a pipeline string from a recipe name.
Parameters
----------
recipe_name : str
Name of the recipe. Must match the name attribute in one of the classes
defined in :mod:`flowcraft.generator.recipes`
Returns
-------
str
Pipeline string ready for parsing and processing by flowcraft engine
]
variable[prefix] assign[=] call[constant[{}.].format, parameter[name[recipes].__name__]]
for taget[tuple[[<ast.Name object at 0x7da1b0394820>, <ast.Name object at 0x7da1b03954e0>, <ast.Name object at 0x7da1b03958a0>]]] in starred[call[name[pkgutil].iter_modules, parameter[name[recipes].__path__, name[prefix]]]] begin[:]
variable[_module] assign[=] call[call[name[importer].find_module, parameter[name[modname]]].load_module, parameter[name[modname]]]
variable[_recipe_classes] assign[=] <ast.ListComp object at 0x7da1b03fab00>
for taget[name[cls]] in starred[name[_recipe_classes]] begin[:]
variable[recipe_cls] assign[=] call[name[cls], parameter[]]
if compare[call[name[getattr], parameter[name[recipe_cls], constant[name], constant[None]]] equal[==] name[recipe_name]] begin[:]
return[call[name[recipe_cls].brew, parameter[]]]
call[name[logger].error, parameter[call[name[colored_print], parameter[call[constant[Recipe name '{}' does not exist.].format, parameter[name[recipe_name]]]]]]]
call[name[sys].exit, parameter[constant[1]]] | keyword[def] identifier[brew_recipe] ( identifier[recipe_name] ):
literal[string]
identifier[prefix] = literal[string] . identifier[format] ( identifier[recipes] . identifier[__name__] )
keyword[for] identifier[importer] , identifier[modname] , identifier[_] keyword[in] identifier[pkgutil] . identifier[iter_modules] ( identifier[recipes] . identifier[__path__] , identifier[prefix] ):
identifier[_module] = identifier[importer] . identifier[find_module] ( identifier[modname] ). identifier[load_module] ( identifier[modname] )
identifier[_recipe_classes] =[ identifier[cls] keyword[for] identifier[cls] keyword[in] identifier[_module] . identifier[__dict__] . identifier[values] () keyword[if]
identifier[isinstance] ( identifier[cls] , identifier[type] )]
keyword[for] identifier[cls] keyword[in] identifier[_recipe_classes] :
identifier[recipe_cls] = identifier[cls] ()
keyword[if] identifier[getattr] ( identifier[recipe_cls] , literal[string] , keyword[None] )== identifier[recipe_name] :
keyword[return] identifier[recipe_cls] . identifier[brew] ()
identifier[logger] . identifier[error] (
identifier[colored_print] ( literal[string] . identifier[format] ( identifier[recipe_name] ))
)
identifier[sys] . identifier[exit] ( literal[int] ) | def brew_recipe(recipe_name):
"""Returns a pipeline string from a recipe name.
Parameters
----------
recipe_name : str
Name of the recipe. Must match the name attribute in one of the classes
defined in :mod:`flowcraft.generator.recipes`
Returns
-------
str
Pipeline string ready for parsing and processing by flowcraft engine
"""
# This will iterate over all modules included in the recipes subpackage
# It will return the import class and the module name, algon with the
# correct prefix
prefix = '{}.'.format(recipes.__name__)
for (importer, modname, _) in pkgutil.iter_modules(recipes.__path__, prefix):
# Import the current module
_module = importer.find_module(modname).load_module(modname)
# Fetch all available classes in module
_recipe_classes = [cls for cls in _module.__dict__.values() if isinstance(cls, type)]
# Iterate over each Recipe class, and check for a match with the
# provided recipe name.
for cls in _recipe_classes:
# Create instance of class to allow fetching the name attribute
recipe_cls = cls()
if getattr(recipe_cls, 'name', None) == recipe_name:
return recipe_cls.brew() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls']] # depends on [control=['for'], data=[]]
logger.error(colored_print("Recipe name '{}' does not exist.".format(recipe_name)))
sys.exit(1) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.