| code (string, 75–104k chars) | code_sememe (string, 47–309k chars) | token_type (string, 215–214k chars) | code_dependency (string, 75–155k chars) |
|---|---|---|---|
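Each row below holds four parallel views of one Python function: the source (code), an AST-style rendering (code_sememe), a keyword/identifier/literal token stream (token_type), and the source re-annotated with control-dependency comments (code_dependency). As a rough illustration of how such views can be derived, here is a minimal sketch using only the standard library; this is an assumption for orientation, not the actual pipeline behind this dataset:

```python
import ast, io, tokenize

src = "def add(a, b):\n    return a + b\n"

# AST view, loosely analogous to the code_sememe column
print(ast.dump(ast.parse(src)))

# token-category view, loosely analogous to the token_type column
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))
```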
def files(xscript=0, yscript=1, eyscript=None, exscript=None, g=None, plotter=xy_databoxes, paths=None, **kwargs):
"""
This will load a bunch of data files, generate data based on the supplied
scripts, and then plot this data using the specified databox plotter.
    xscript, yscript, eyscript, exscript: scripts to generate x, y, and errors
    g: optional dictionary of globals
optional: filters="*.*" to set the file filters for the dialog.
**kwargs are sent to plotter()
"""
if 'delimiter' in kwargs: delimiter = kwargs.pop('delimiter')
else: delimiter = None
if 'filters' in kwargs: filters = kwargs.pop('filters')
else: filters = '*.*'
ds = _data.load_multiple(paths=paths, delimiter=delimiter, filters=filters)
if ds is None or len(ds) == 0: return
# generate a default title (the directory)
if 'title' not in kwargs: kwargs['title']=_os.path.split(ds[0].path)[0]
# run the databox plotter
plotter(ds, xscript=xscript, yscript=yscript, eyscript=eyscript, exscript=exscript, g=g, **kwargs)
return ds
|
def function[files, parameter[xscript, yscript, eyscript, exscript, g, plotter, paths]]:
constant[
This will load a bunch of data files, generate data based on the supplied
scripts, and then plot this data using the specified databox plotter.
    xscript, yscript, eyscript, exscript: scripts to generate x, y, and errors
    g: optional dictionary of globals
optional: filters="*.*" to set the file filters for the dialog.
**kwargs are sent to plotter()
]
if compare[constant[delimiter] in name[kwargs]] begin[:]
variable[delimiter] assign[=] call[name[kwargs].pop, parameter[constant[delimiter]]]
if compare[constant[filters] in name[kwargs]] begin[:]
variable[filters] assign[=] call[name[kwargs].pop, parameter[constant[filters]]]
variable[ds] assign[=] call[name[_data].load_multiple, parameter[]]
if <ast.BoolOp object at 0x7da1b1a46500> begin[:]
return[None]
if compare[constant[title] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[title]] assign[=] call[call[name[_os].path.split, parameter[call[name[ds]][constant[0]].path]]][constant[0]]
call[name[plotter], parameter[name[ds]]]
return[name[ds]]
|
keyword[def] identifier[files] ( identifier[xscript] = literal[int] , identifier[yscript] = literal[int] , identifier[eyscript] = keyword[None] , identifier[exscript] = keyword[None] , identifier[g] = keyword[None] , identifier[plotter] = identifier[xy_databoxes] , identifier[paths] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[delimiter] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[else] : identifier[delimiter] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[filters] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[else] : identifier[filters] = literal[string]
identifier[ds] = identifier[_data] . identifier[load_multiple] ( identifier[paths] = identifier[paths] , identifier[delimiter] = identifier[delimiter] , identifier[filters] = identifier[filters] )
keyword[if] identifier[ds] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[ds] )== literal[int] : keyword[return]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[_os] . identifier[path] . identifier[split] ( identifier[ds] [ literal[int] ]. identifier[path] )[ literal[int] ]
identifier[plotter] ( identifier[ds] , identifier[xscript] = identifier[xscript] , identifier[yscript] = identifier[yscript] , identifier[eyscript] = identifier[eyscript] , identifier[exscript] = identifier[exscript] , identifier[g] = identifier[g] ,** identifier[kwargs] )
keyword[return] identifier[ds]
|
def files(xscript=0, yscript=1, eyscript=None, exscript=None, g=None, plotter=xy_databoxes, paths=None, **kwargs):
"""
This will load a bunch of data files, generate data based on the supplied
scripts, and then plot this data using the specified databox plotter.
    xscript, yscript, eyscript, exscript: scripts to generate x, y, and errors
    g: optional dictionary of globals
optional: filters="*.*" to set the file filters for the dialog.
**kwargs are sent to plotter()
"""
if 'delimiter' in kwargs:
delimiter = kwargs.pop('delimiter') # depends on [control=['if'], data=['kwargs']]
else:
delimiter = None
if 'filters' in kwargs:
filters = kwargs.pop('filters') # depends on [control=['if'], data=['kwargs']]
else:
filters = '*.*'
ds = _data.load_multiple(paths=paths, delimiter=delimiter, filters=filters)
if ds is None or len(ds) == 0:
return # depends on [control=['if'], data=[]]
# generate a default title (the directory)
if 'title' not in kwargs:
kwargs['title'] = _os.path.split(ds[0].path)[0] # depends on [control=['if'], data=['kwargs']]
# run the databox plotter
plotter(ds, xscript=xscript, yscript=yscript, eyscript=eyscript, exscript=exscript, g=g, **kwargs)
return ds
|
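For orientation, a hypothetical call is sketched below; the paths, the numeric column-index scripts, and the delimiter value are illustrative assumptions, not taken from the source:

```python
# hypothetical usage of files(); column-index scripts per the signature defaults
ds = files(xscript=0, yscript=1,
           paths=['run1.csv', 'run2.csv'],
           delimiter=',',
           title='calibration runs')
```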
def already_coords(self, address):
"""test used to see if we have coordinates or address"""
m = re.search(self.COORD_MATCH, address)
return (m != None)
|
def function[already_coords, parameter[self, address]]:
    constant[test used to see if we have coordinates or an address]
variable[m] assign[=] call[name[re].search, parameter[name[self].COORD_MATCH, name[address]]]
return[compare[name[m] not_equal[!=] constant[None]]]
|
keyword[def] identifier[already_coords] ( identifier[self] , identifier[address] ):
literal[string]
identifier[m] = identifier[re] . identifier[search] ( identifier[self] . identifier[COORD_MATCH] , identifier[address] )
keyword[return] ( identifier[m] != keyword[None] )
|
def already_coords(self, address):
"""test used to see if we have coordinates or address"""
m = re.search(self.COORD_MATCH, address)
return m != None
|
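COORD_MATCH itself is not shown in the row above; a plausible stand-in (purely hypothetical) is a lat,lon pattern, which makes the truthiness test concrete:

```python
import re

# hypothetical stand-in for self.COORD_MATCH; the real pattern is not in the source
COORD_MATCH = re.compile(r'^-?\d{1,3}\.\d+,\s*-?\d{1,3}\.\d+$')

print(bool(COORD_MATCH.search('40.4168, -3.7038')))       # True: coordinates
print(bool(COORD_MATCH.search('Calle Mayor 1, Madrid')))  # False: an address
```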
def parse(self, limit=None):
"""
IMPC data is delivered in three separate csv files OR
in one integrated file, each with the same file format.
:param limit:
:return:
"""
if limit is not None:
LOG.info("Only parsing first %s rows fo each file", str(limit))
LOG.info("Parsing files...")
if self.test_only:
self.test_mode = True
# for f in ['impc', 'euro', 'mgd', '3i']:
for f in ['all']:
file = '/'.join((self.rawdir, self.files[f]['file']))
self._process_data(file, limit)
LOG.info("Finished parsing")
return
|
def function[parse, parameter[self, limit]]:
constant[
IMPC data is delivered in three separate csv files OR
in one integrated file, each with the same file format.
:param limit:
:return:
]
if compare[name[limit] is_not constant[None]] begin[:]
        call[name[LOG].info, parameter[constant[Only parsing first %s rows of each file], call[name[str], parameter[name[limit]]]]]
call[name[LOG].info, parameter[constant[Parsing files...]]]
if name[self].test_only begin[:]
name[self].test_mode assign[=] constant[True]
    for target[name[f]] in starred[list[[<ast.Constant object at 0x7da207f03e50>]]] begin[:]
variable[file] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da207f00250>, <ast.Subscript object at 0x7da207f02d70>]]]]
call[name[self]._process_data, parameter[name[file], name[limit]]]
call[name[LOG].info, parameter[constant[Finished parsing]]]
return[None]
|
keyword[def] identifier[parse] ( identifier[self] , identifier[limit] = keyword[None] ):
literal[string]
keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[str] ( identifier[limit] ))
identifier[LOG] . identifier[info] ( literal[string] )
keyword[if] identifier[self] . identifier[test_only] :
identifier[self] . identifier[test_mode] = keyword[True]
keyword[for] identifier[f] keyword[in] [ literal[string] ]:
identifier[file] = literal[string] . identifier[join] (( identifier[self] . identifier[rawdir] , identifier[self] . identifier[files] [ identifier[f] ][ literal[string] ]))
identifier[self] . identifier[_process_data] ( identifier[file] , identifier[limit] )
identifier[LOG] . identifier[info] ( literal[string] )
keyword[return]
|
def parse(self, limit=None):
"""
IMPC data is delivered in three separate csv files OR
in one integrated file, each with the same file format.
:param limit:
:return:
"""
if limit is not None:
        LOG.info('Only parsing first %s rows of each file', str(limit)) # depends on [control=['if'], data=['limit']]
LOG.info('Parsing files...')
if self.test_only:
self.test_mode = True # depends on [control=['if'], data=[]]
# for f in ['impc', 'euro', 'mgd', '3i']:
for f in ['all']:
file = '/'.join((self.rawdir, self.files[f]['file']))
self._process_data(file, limit) # depends on [control=['for'], data=['f']]
LOG.info('Finished parsing')
return
|
async def recv_trailing_metadata(self):
"""Coroutine to wait for trailers with trailing metadata from the
server.
.. note:: This coroutine will be called implicitly at exit from
this call (context manager's exit), if not called before explicitly.
May raise :py:class:`~grpclib.exceptions.GRPCError` if server returned
non-:py:attr:`Status.OK <grpclib.const.Status.OK>` in trailers.
When this coroutine finishes, you can access received trailing metadata
by using :py:attr:`trailing_metadata` attribute.
"""
if not self._end_done:
raise ProtocolError('Outgoing stream was not ended')
if (
not self._cardinality.server_streaming
and not self._recv_message_count
):
raise ProtocolError('No messages were received before waiting '
'for trailing metadata')
if self._recv_trailing_metadata_done:
raise ProtocolError('Trailing metadata was already received')
with self._wrapper:
headers = await self._stream.recv_headers()
self._recv_trailing_metadata_done = True
metadata = decode_metadata(headers)
metadata, = await self._dispatch.recv_trailing_metadata(metadata)
self.trailing_metadata = metadata
self._raise_for_grpc_status(dict(headers))
|
<ast.AsyncFunctionDef object at 0x7da1b08bd240>
|
keyword[async] keyword[def] identifier[recv_trailing_metadata] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_end_done] :
keyword[raise] identifier[ProtocolError] ( literal[string] )
keyword[if] (
keyword[not] identifier[self] . identifier[_cardinality] . identifier[server_streaming]
keyword[and] keyword[not] identifier[self] . identifier[_recv_message_count]
):
keyword[raise] identifier[ProtocolError] ( literal[string]
literal[string] )
keyword[if] identifier[self] . identifier[_recv_trailing_metadata_done] :
keyword[raise] identifier[ProtocolError] ( literal[string] )
keyword[with] identifier[self] . identifier[_wrapper] :
identifier[headers] = keyword[await] identifier[self] . identifier[_stream] . identifier[recv_headers] ()
identifier[self] . identifier[_recv_trailing_metadata_done] = keyword[True]
identifier[metadata] = identifier[decode_metadata] ( identifier[headers] )
identifier[metadata] ,= keyword[await] identifier[self] . identifier[_dispatch] . identifier[recv_trailing_metadata] ( identifier[metadata] )
identifier[self] . identifier[trailing_metadata] = identifier[metadata]
identifier[self] . identifier[_raise_for_grpc_status] ( identifier[dict] ( identifier[headers] ))
|
async def recv_trailing_metadata(self):
"""Coroutine to wait for trailers with trailing metadata from the
server.
.. note:: This coroutine will be called implicitly at exit from
this call (context manager's exit), if not called before explicitly.
May raise :py:class:`~grpclib.exceptions.GRPCError` if server returned
non-:py:attr:`Status.OK <grpclib.const.Status.OK>` in trailers.
When this coroutine finishes, you can access received trailing metadata
by using :py:attr:`trailing_metadata` attribute.
"""
if not self._end_done:
raise ProtocolError('Outgoing stream was not ended') # depends on [control=['if'], data=[]]
if not self._cardinality.server_streaming and (not self._recv_message_count):
raise ProtocolError('No messages were received before waiting for trailing metadata') # depends on [control=['if'], data=[]]
if self._recv_trailing_metadata_done:
raise ProtocolError('Trailing metadata was already received') # depends on [control=['if'], data=[]]
with self._wrapper:
headers = await self._stream.recv_headers()
self._recv_trailing_metadata_done = True
metadata = decode_metadata(headers)
(metadata,) = await self._dispatch.recv_trailing_metadata(metadata)
self.trailing_metadata = metadata
self._raise_for_grpc_status(dict(headers)) # depends on [control=['with'], data=[]]
|
def isqref(object):
"""
Get whether the object is a I{qualified reference}.
@param object: An object to be tested.
@type object: I{any}
@rtype: boolean
@see: L{qualify}
"""
return (
isinstance(object, tuple) and
len(object) == 2 and
isinstance(object[0], basestring) and
isinstance(object[1], basestring))
|
def function[isqref, parameter[object]]:
constant[
Get whether the object is a I{qualified reference}.
@param object: An object to be tested.
@type object: I{any}
@rtype: boolean
@see: L{qualify}
]
return[<ast.BoolOp object at 0x7da1b08e5060>]
|
keyword[def] identifier[isqref] ( identifier[object] ):
literal[string]
keyword[return] (
identifier[isinstance] ( identifier[object] , identifier[tuple] ) keyword[and]
identifier[len] ( identifier[object] )== literal[int] keyword[and]
identifier[isinstance] ( identifier[object] [ literal[int] ], identifier[basestring] ) keyword[and]
identifier[isinstance] ( identifier[object] [ literal[int] ], identifier[basestring] ))
|
def isqref(object):
"""
Get whether the object is a I{qualified reference}.
@param object: An object to be tested.
@type object: I{any}
@rtype: boolean
@see: L{qualify}
"""
return isinstance(object, tuple) and len(object) == 2 and isinstance(object[0], basestring) and isinstance(object[1], basestring)
|
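A qualified reference is thus a (namespace, name) pair of strings. The checks below are illustrative; note the basestring reference ties the original to Python 2, so a shim is needed on Python 3:

```python
basestring = str  # Python 3 shim; the original targets Python 2

print(isqref(('tns', 'Person')))      # True: two-string tuple
print(isqref(('tns', 'Person', 1)))   # False: wrong arity
print(isqref('tns:Person'))           # False: not a tuple
```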
def load_qrandom():
"""
Loads a set of 10000 random numbers generated by qrandom.
This dataset can be used when you want to do some limited tests with "true"
random data without an internet connection.
Returns:
int array
the dataset
"""
fname = "datasets/qrandom.npy"
with pkg_resources.resource_stream(__name__, fname) as f:
return np.load(f)
|
def function[load_qrandom, parameter[]]:
constant[
Loads a set of 10000 random numbers generated by qrandom.
This dataset can be used when you want to do some limited tests with "true"
random data without an internet connection.
Returns:
int array
the dataset
]
variable[fname] assign[=] constant[datasets/qrandom.npy]
with call[name[pkg_resources].resource_stream, parameter[name[__name__], name[fname]]] begin[:]
return[call[name[np].load, parameter[name[f]]]]
|
keyword[def] identifier[load_qrandom] ():
literal[string]
identifier[fname] = literal[string]
keyword[with] identifier[pkg_resources] . identifier[resource_stream] ( identifier[__name__] , identifier[fname] ) keyword[as] identifier[f] :
keyword[return] identifier[np] . identifier[load] ( identifier[f] )
|
def load_qrandom():
"""
Loads a set of 10000 random numbers generated by qrandom.
This dataset can be used when you want to do some limited tests with "true"
random data without an internet connection.
Returns:
int array
the dataset
"""
fname = 'datasets/qrandom.npy'
with pkg_resources.resource_stream(__name__, fname) as f:
return np.load(f) # depends on [control=['with'], data=['f']]
|
def scene_name(frames):
"""parse a scene.name message"""
# "scene.name" <scene_id> <config>
reader = MessageReader(frames)
results = reader.string("command").uint32("scene_id").string("name").assert_end().get()
if results.command != "scene.name":
raise MessageParserError("Command is not 'scene.name'")
return (results.scene_id, results.name)
|
def function[scene_name, parameter[frames]]:
constant[parse a scene.name message]
variable[reader] assign[=] call[name[MessageReader], parameter[name[frames]]]
variable[results] assign[=] call[call[call[call[call[name[reader].string, parameter[constant[command]]].uint32, parameter[constant[scene_id]]].string, parameter[constant[name]]].assert_end, parameter[]].get, parameter[]]
if compare[name[results].command not_equal[!=] constant[scene.name]] begin[:]
<ast.Raise object at 0x7da1b14510f0>
return[tuple[[<ast.Attribute object at 0x7da1b1450790>, <ast.Attribute object at 0x7da1b1450e80>]]]
|
keyword[def] identifier[scene_name] ( identifier[frames] ):
literal[string]
identifier[reader] = identifier[MessageReader] ( identifier[frames] )
identifier[results] = identifier[reader] . identifier[string] ( literal[string] ). identifier[uint32] ( literal[string] ). identifier[string] ( literal[string] ). identifier[assert_end] (). identifier[get] ()
keyword[if] identifier[results] . identifier[command] != literal[string] :
keyword[raise] identifier[MessageParserError] ( literal[string] )
keyword[return] ( identifier[results] . identifier[scene_id] , identifier[results] . identifier[name] )
|
def scene_name(frames):
"""parse a scene.name message"""
# "scene.name" <scene_id> <config>
reader = MessageReader(frames)
results = reader.string('command').uint32('scene_id').string('name').assert_end().get()
if results.command != 'scene.name':
raise MessageParserError("Command is not 'scene.name'") # depends on [control=['if'], data=[]]
return (results.scene_id, results.name)
|
def get_project_pod_spec(volume_mounts,
volumes,
image,
command,
args,
ports,
env_vars=None,
env_from=None,
container_name=None,
resources=None,
node_selector=None,
affinity=None,
tolerations=None,
image_pull_policy=None,
restart_policy=None,
service_account_name=None):
"""Pod spec to be used to create pods for project: tensorboard, notebooks."""
volume_mounts = to_list(volume_mounts, check_none=True)
volumes = to_list(volumes, check_none=True)
gpu_volume_mounts, gpu_volumes = get_gpu_volumes_def(resources)
volume_mounts += gpu_volume_mounts
volumes += gpu_volumes
ports = [client.V1ContainerPort(container_port=port) for port in ports]
pod_container = get_pod_container(
volume_mounts=volume_mounts,
image=image,
command=command,
args=args,
ports=ports,
env_vars=env_vars,
env_from=env_from,
container_name=container_name,
resources=resources,
image_pull_policy=image_pull_policy)
containers = [pod_container]
if service_account_name and not conf.get('K8S_RBAC_ENABLED'):
service_account_name = None
return client.V1PodSpec(restart_policy=restart_policy,
security_context=get_security_context(),
service_account_name=service_account_name,
containers=containers,
volumes=volumes,
node_selector=node_selector,
affinity=affinity,
tolerations=tolerations)
|
def function[get_project_pod_spec, parameter[volume_mounts, volumes, image, command, args, ports, env_vars, env_from, container_name, resources, node_selector, affinity, tolerations, image_pull_policy, restart_policy, service_account_name]]:
constant[Pod spec to be used to create pods for project: tensorboard, notebooks.]
variable[volume_mounts] assign[=] call[name[to_list], parameter[name[volume_mounts]]]
variable[volumes] assign[=] call[name[to_list], parameter[name[volumes]]]
<ast.Tuple object at 0x7da207f001f0> assign[=] call[name[get_gpu_volumes_def], parameter[name[resources]]]
<ast.AugAssign object at 0x7da207f01bd0>
<ast.AugAssign object at 0x7da207f031c0>
variable[ports] assign[=] <ast.ListComp object at 0x7da207f01000>
variable[pod_container] assign[=] call[name[get_pod_container], parameter[]]
variable[containers] assign[=] list[[<ast.Name object at 0x7da207f006d0>]]
if <ast.BoolOp object at 0x7da207f022c0> begin[:]
variable[service_account_name] assign[=] constant[None]
return[call[name[client].V1PodSpec, parameter[]]]
|
keyword[def] identifier[get_project_pod_spec] ( identifier[volume_mounts] ,
identifier[volumes] ,
identifier[image] ,
identifier[command] ,
identifier[args] ,
identifier[ports] ,
identifier[env_vars] = keyword[None] ,
identifier[env_from] = keyword[None] ,
identifier[container_name] = keyword[None] ,
identifier[resources] = keyword[None] ,
identifier[node_selector] = keyword[None] ,
identifier[affinity] = keyword[None] ,
identifier[tolerations] = keyword[None] ,
identifier[image_pull_policy] = keyword[None] ,
identifier[restart_policy] = keyword[None] ,
identifier[service_account_name] = keyword[None] ):
literal[string]
identifier[volume_mounts] = identifier[to_list] ( identifier[volume_mounts] , identifier[check_none] = keyword[True] )
identifier[volumes] = identifier[to_list] ( identifier[volumes] , identifier[check_none] = keyword[True] )
identifier[gpu_volume_mounts] , identifier[gpu_volumes] = identifier[get_gpu_volumes_def] ( identifier[resources] )
identifier[volume_mounts] += identifier[gpu_volume_mounts]
identifier[volumes] += identifier[gpu_volumes]
identifier[ports] =[ identifier[client] . identifier[V1ContainerPort] ( identifier[container_port] = identifier[port] ) keyword[for] identifier[port] keyword[in] identifier[ports] ]
identifier[pod_container] = identifier[get_pod_container] (
identifier[volume_mounts] = identifier[volume_mounts] ,
identifier[image] = identifier[image] ,
identifier[command] = identifier[command] ,
identifier[args] = identifier[args] ,
identifier[ports] = identifier[ports] ,
identifier[env_vars] = identifier[env_vars] ,
identifier[env_from] = identifier[env_from] ,
identifier[container_name] = identifier[container_name] ,
identifier[resources] = identifier[resources] ,
identifier[image_pull_policy] = identifier[image_pull_policy] )
identifier[containers] =[ identifier[pod_container] ]
keyword[if] identifier[service_account_name] keyword[and] keyword[not] identifier[conf] . identifier[get] ( literal[string] ):
identifier[service_account_name] = keyword[None]
keyword[return] identifier[client] . identifier[V1PodSpec] ( identifier[restart_policy] = identifier[restart_policy] ,
identifier[security_context] = identifier[get_security_context] (),
identifier[service_account_name] = identifier[service_account_name] ,
identifier[containers] = identifier[containers] ,
identifier[volumes] = identifier[volumes] ,
identifier[node_selector] = identifier[node_selector] ,
identifier[affinity] = identifier[affinity] ,
identifier[tolerations] = identifier[tolerations] )
|
def get_project_pod_spec(volume_mounts, volumes, image, command, args, ports, env_vars=None, env_from=None, container_name=None, resources=None, node_selector=None, affinity=None, tolerations=None, image_pull_policy=None, restart_policy=None, service_account_name=None):
"""Pod spec to be used to create pods for project: tensorboard, notebooks."""
volume_mounts = to_list(volume_mounts, check_none=True)
volumes = to_list(volumes, check_none=True)
(gpu_volume_mounts, gpu_volumes) = get_gpu_volumes_def(resources)
volume_mounts += gpu_volume_mounts
volumes += gpu_volumes
ports = [client.V1ContainerPort(container_port=port) for port in ports]
pod_container = get_pod_container(volume_mounts=volume_mounts, image=image, command=command, args=args, ports=ports, env_vars=env_vars, env_from=env_from, container_name=container_name, resources=resources, image_pull_policy=image_pull_policy)
containers = [pod_container]
if service_account_name and (not conf.get('K8S_RBAC_ENABLED')):
service_account_name = None # depends on [control=['if'], data=[]]
return client.V1PodSpec(restart_policy=restart_policy, security_context=get_security_context(), service_account_name=service_account_name, containers=containers, volumes=volumes, node_selector=node_selector, affinity=affinity, tolerations=tolerations)
|
def killCells(self, percent=0.05):
"""
Changes the percentage of cells that are now considered dead. The first
    time you call this method a permutation list is set up. Subsequent calls
    change the number of cells considered dead.
"""
numColumns = numpy.prod(self.getColumnDimensions())
if self.zombiePermutation is None:
self.zombiePermutation = numpy.random.permutation(numColumns)
self.numDead = int(round(percent * numColumns))
if self.numDead > 0:
self.deadCols = self.zombiePermutation[0:self.numDead]
else:
self.deadCols = numpy.array([])
self.deadColumnInputSpan = self.getConnectedSpan(self.deadCols)
self.removeDeadColumns()
|
def function[killCells, parameter[self, percent]]:
constant[
Changes the percentage of cells that are now considered dead. The first
    time you call this method a permutation list is set up. Subsequent calls
    change the number of cells considered dead.
]
variable[numColumns] assign[=] call[name[numpy].prod, parameter[call[name[self].getColumnDimensions, parameter[]]]]
if compare[name[self].zombiePermutation is constant[None]] begin[:]
name[self].zombiePermutation assign[=] call[name[numpy].random.permutation, parameter[name[numColumns]]]
name[self].numDead assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[name[percent] * name[numColumns]]]]]]
if compare[name[self].numDead greater[>] constant[0]] begin[:]
name[self].deadCols assign[=] call[name[self].zombiePermutation][<ast.Slice object at 0x7da1b08b2350>]
name[self].deadColumnInputSpan assign[=] call[name[self].getConnectedSpan, parameter[name[self].deadCols]]
call[name[self].removeDeadColumns, parameter[]]
|
keyword[def] identifier[killCells] ( identifier[self] , identifier[percent] = literal[int] ):
literal[string]
identifier[numColumns] = identifier[numpy] . identifier[prod] ( identifier[self] . identifier[getColumnDimensions] ())
keyword[if] identifier[self] . identifier[zombiePermutation] keyword[is] keyword[None] :
identifier[self] . identifier[zombiePermutation] = identifier[numpy] . identifier[random] . identifier[permutation] ( identifier[numColumns] )
identifier[self] . identifier[numDead] = identifier[int] ( identifier[round] ( identifier[percent] * identifier[numColumns] ))
keyword[if] identifier[self] . identifier[numDead] > literal[int] :
identifier[self] . identifier[deadCols] = identifier[self] . identifier[zombiePermutation] [ literal[int] : identifier[self] . identifier[numDead] ]
keyword[else] :
identifier[self] . identifier[deadCols] = identifier[numpy] . identifier[array] ([])
identifier[self] . identifier[deadColumnInputSpan] = identifier[self] . identifier[getConnectedSpan] ( identifier[self] . identifier[deadCols] )
identifier[self] . identifier[removeDeadColumns] ()
|
def killCells(self, percent=0.05):
"""
Changes the percentage of cells that are now considered dead. The first
    time you call this method a permutation list is set up. Subsequent calls
    change the number of cells considered dead.
"""
numColumns = numpy.prod(self.getColumnDimensions())
if self.zombiePermutation is None:
self.zombiePermutation = numpy.random.permutation(numColumns) # depends on [control=['if'], data=[]]
self.numDead = int(round(percent * numColumns))
if self.numDead > 0:
self.deadCols = self.zombiePermutation[0:self.numDead] # depends on [control=['if'], data=[]]
else:
self.deadCols = numpy.array([])
self.deadColumnInputSpan = self.getConnectedSpan(self.deadCols)
self.removeDeadColumns()
|
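The permutation is computed once and cached because a prefix of a fixed random permutation is a sample without replacement, and raising percent on later calls then grows the same dead set rather than drawing a fresh one. A small numpy sketch of that property (the column count is illustrative):

```python
import numpy as np

num_columns = 100
perm = np.random.permutation(num_columns)   # computed once, then reused

dead_5 = perm[:int(round(0.05 * num_columns))]    # 5 distinct dead columns
dead_10 = perm[:int(round(0.10 * num_columns))]   # 10 columns, containing dead_5
assert set(dead_5) <= set(dead_10)
```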
def link_sources(self):
"Returns potential Link or Stream sources."
if isinstance(self, GenericOverlayPlot):
zorders = []
elif self.batched:
zorders = list(range(self.zorder, self.zorder+len(self.hmap.last)))
else:
zorders = [self.zorder]
if isinstance(self, GenericOverlayPlot) and not self.batched:
sources = []
elif not self.static or isinstance(self.hmap, DynamicMap):
sources = [o for i, inputs in self.stream_sources.items()
for o in inputs if i in zorders]
else:
sources = [self.hmap.last]
return sources
|
def function[link_sources, parameter[self]]:
constant[Returns potential Link or Stream sources.]
if call[name[isinstance], parameter[name[self], name[GenericOverlayPlot]]] begin[:]
variable[zorders] assign[=] list[[]]
if <ast.BoolOp object at 0x7da18fe93610> begin[:]
variable[sources] assign[=] list[[]]
return[name[sources]]
|
keyword[def] identifier[link_sources] ( identifier[self] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] , identifier[GenericOverlayPlot] ):
identifier[zorders] =[]
keyword[elif] identifier[self] . identifier[batched] :
identifier[zorders] = identifier[list] ( identifier[range] ( identifier[self] . identifier[zorder] , identifier[self] . identifier[zorder] + identifier[len] ( identifier[self] . identifier[hmap] . identifier[last] )))
keyword[else] :
identifier[zorders] =[ identifier[self] . identifier[zorder] ]
keyword[if] identifier[isinstance] ( identifier[self] , identifier[GenericOverlayPlot] ) keyword[and] keyword[not] identifier[self] . identifier[batched] :
identifier[sources] =[]
keyword[elif] keyword[not] identifier[self] . identifier[static] keyword[or] identifier[isinstance] ( identifier[self] . identifier[hmap] , identifier[DynamicMap] ):
identifier[sources] =[ identifier[o] keyword[for] identifier[i] , identifier[inputs] keyword[in] identifier[self] . identifier[stream_sources] . identifier[items] ()
keyword[for] identifier[o] keyword[in] identifier[inputs] keyword[if] identifier[i] keyword[in] identifier[zorders] ]
keyword[else] :
identifier[sources] =[ identifier[self] . identifier[hmap] . identifier[last] ]
keyword[return] identifier[sources]
|
def link_sources(self):
"""Returns potential Link or Stream sources."""
if isinstance(self, GenericOverlayPlot):
zorders = [] # depends on [control=['if'], data=[]]
elif self.batched:
zorders = list(range(self.zorder, self.zorder + len(self.hmap.last))) # depends on [control=['if'], data=[]]
else:
zorders = [self.zorder]
if isinstance(self, GenericOverlayPlot) and (not self.batched):
sources = [] # depends on [control=['if'], data=[]]
elif not self.static or isinstance(self.hmap, DynamicMap):
sources = [o for (i, inputs) in self.stream_sources.items() for o in inputs if i in zorders] # depends on [control=['if'], data=[]]
else:
sources = [self.hmap.last]
return sources
|
def do_exit(self, arg):
''' Exit the shell. '''
if self.arm.is_connected():
self.arm.disconnect()
print('Bye!')
return True
|
def function[do_exit, parameter[self, arg]]:
constant[ Exit the shell. ]
if call[name[self].arm.is_connected, parameter[]] begin[:]
call[name[self].arm.disconnect, parameter[]]
call[name[print], parameter[constant[Bye!]]]
return[constant[True]]
|
keyword[def] identifier[do_exit] ( identifier[self] , identifier[arg] ):
literal[string]
keyword[if] identifier[self] . identifier[arm] . identifier[is_connected] ():
identifier[self] . identifier[arm] . identifier[disconnect] ()
identifier[print] ( literal[string] )
keyword[return] keyword[True]
|
def do_exit(self, arg):
""" Exit the shell. """
if self.arm.is_connected():
self.arm.disconnect() # depends on [control=['if'], data=[]]
print('Bye!')
return True
|
def get_qutip_module(required_version='3.2'):
"""
Attempts to return the qutip module, but
silently returns ``None`` if it can't be
imported, or doesn't have version at
least ``required_version``.
:param str required_version: Valid input to
``distutils.version.LooseVersion``.
:return: The qutip module or ``None``.
:rtype: ``module`` or ``NoneType``
"""
try:
import qutip as qt
from distutils.version import LooseVersion
_qt_version = LooseVersion(qt.version.version)
if _qt_version < LooseVersion(required_version):
return None
except ImportError:
return None
return qt
|
def function[get_qutip_module, parameter[required_version]]:
constant[
Attempts to return the qutip module, but
silently returns ``None`` if it can't be
imported, or doesn't have version at
least ``required_version``.
:param str required_version: Valid input to
``distutils.version.LooseVersion``.
:return: The qutip module or ``None``.
:rtype: ``module`` or ``NoneType``
]
<ast.Try object at 0x7da20c6c6d10>
return[name[qt]]
|
keyword[def] identifier[get_qutip_module] ( identifier[required_version] = literal[string] ):
literal[string]
keyword[try] :
keyword[import] identifier[qutip] keyword[as] identifier[qt]
keyword[from] identifier[distutils] . identifier[version] keyword[import] identifier[LooseVersion]
identifier[_qt_version] = identifier[LooseVersion] ( identifier[qt] . identifier[version] . identifier[version] )
keyword[if] identifier[_qt_version] < identifier[LooseVersion] ( identifier[required_version] ):
keyword[return] keyword[None]
keyword[except] identifier[ImportError] :
keyword[return] keyword[None]
keyword[return] identifier[qt]
|
def get_qutip_module(required_version='3.2'):
"""
Attempts to return the qutip module, but
silently returns ``None`` if it can't be
imported, or doesn't have version at
least ``required_version``.
:param str required_version: Valid input to
``distutils.version.LooseVersion``.
:return: The qutip module or ``None``.
:rtype: ``module`` or ``NoneType``
"""
try:
import qutip as qt
from distutils.version import LooseVersion
_qt_version = LooseVersion(qt.version.version)
if _qt_version < LooseVersion(required_version):
return None # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ImportError:
return None # depends on [control=['except'], data=[]]
return qt
|
def from_file(cls, filename):
"""
Construct an APIDefinition by parsing the given `filename`.
If PyYAML is installed, YAML files are supported.
JSON files are always supported.
:param filename: The filename to read.
:rtype: APIDefinition
"""
with open(filename) as infp:
if filename.endswith('.yaml') or filename.endswith('.yml'):
import yaml
data = yaml.safe_load(infp)
else:
import json
data = json.load(infp)
return cls.from_data(data)
|
def function[from_file, parameter[cls, filename]]:
constant[
Construct an APIDefinition by parsing the given `filename`.
If PyYAML is installed, YAML files are supported.
JSON files are always supported.
:param filename: The filename to read.
:rtype: APIDefinition
]
with call[name[open], parameter[name[filename]]] begin[:]
if <ast.BoolOp object at 0x7da18bcc84f0> begin[:]
import module[yaml]
variable[data] assign[=] call[name[yaml].safe_load, parameter[name[infp]]]
return[call[name[cls].from_data, parameter[name[data]]]]
|
keyword[def] identifier[from_file] ( identifier[cls] , identifier[filename] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[infp] :
keyword[if] identifier[filename] . identifier[endswith] ( literal[string] ) keyword[or] identifier[filename] . identifier[endswith] ( literal[string] ):
keyword[import] identifier[yaml]
identifier[data] = identifier[yaml] . identifier[safe_load] ( identifier[infp] )
keyword[else] :
keyword[import] identifier[json]
identifier[data] = identifier[json] . identifier[load] ( identifier[infp] )
keyword[return] identifier[cls] . identifier[from_data] ( identifier[data] )
|
def from_file(cls, filename):
"""
Construct an APIDefinition by parsing the given `filename`.
If PyYAML is installed, YAML files are supported.
JSON files are always supported.
:param filename: The filename to read.
:rtype: APIDefinition
"""
with open(filename) as infp:
if filename.endswith('.yaml') or filename.endswith('.yml'):
import yaml
data = yaml.safe_load(infp) # depends on [control=['if'], data=[]]
else:
import json
data = json.load(infp) # depends on [control=['with'], data=['infp']]
return cls.from_data(data)
|
def wsgiref_thread_arbiter(wsgi, host, port):
'probably not suitable for production use; example of threaded server'
import wsgiref.simple_server
httpd = wsgiref.simple_server.make_server(host, port, wsgi)
httpd.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
def start_server():
server_thread = threading.Thread(target=httpd.serve_forever)
server_thread.daemon = True
server_thread.start()
def close_socket():
httpd.socket.close()
arbiter = Arbiter(post_fork=start_server, child_pre_exit=httpd.shutdown,
parent_pre_stop=close_socket)
return arbiter
|
def function[wsgiref_thread_arbiter, parameter[wsgi, host, port]]:
constant[probably not suitable for production use; example of threaded server]
import module[wsgiref.simple_server]
variable[httpd] assign[=] call[name[wsgiref].simple_server.make_server, parameter[name[host], name[port], name[wsgi]]]
call[name[httpd].socket.setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_REUSEADDR, constant[1]]]
def function[start_server, parameter[]]:
variable[server_thread] assign[=] call[name[threading].Thread, parameter[]]
name[server_thread].daemon assign[=] constant[True]
call[name[server_thread].start, parameter[]]
def function[close_socket, parameter[]]:
call[name[httpd].socket.close, parameter[]]
variable[arbiter] assign[=] call[name[Arbiter], parameter[]]
return[name[arbiter]]
|
keyword[def] identifier[wsgiref_thread_arbiter] ( identifier[wsgi] , identifier[host] , identifier[port] ):
literal[string]
keyword[import] identifier[wsgiref] . identifier[simple_server]
identifier[httpd] = identifier[wsgiref] . identifier[simple_server] . identifier[make_server] ( identifier[host] , identifier[port] , identifier[wsgi] )
identifier[httpd] . identifier[socket] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEADDR] , literal[int] )
keyword[def] identifier[start_server] ():
identifier[server_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[httpd] . identifier[serve_forever] )
identifier[server_thread] . identifier[daemon] = keyword[True]
identifier[server_thread] . identifier[start] ()
keyword[def] identifier[close_socket] ():
identifier[httpd] . identifier[socket] . identifier[close] ()
identifier[arbiter] = identifier[Arbiter] ( identifier[post_fork] = identifier[start_server] , identifier[child_pre_exit] = identifier[httpd] . identifier[shutdown] ,
identifier[parent_pre_stop] = identifier[close_socket] )
keyword[return] identifier[arbiter]
|
def wsgiref_thread_arbiter(wsgi, host, port):
"""probably not suitable for production use; example of threaded server"""
import wsgiref.simple_server
httpd = wsgiref.simple_server.make_server(host, port, wsgi)
httpd.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
def start_server():
server_thread = threading.Thread(target=httpd.serve_forever)
server_thread.daemon = True
server_thread.start()
def close_socket():
httpd.socket.close()
arbiter = Arbiter(post_fork=start_server, child_pre_exit=httpd.shutdown, parent_pre_stop=close_socket)
return arbiter
|
def query(method='servers', server_id=None, command=None, args=None,
http_method='GET', root='api_root'):
''' Make a call to the Scaleway API.
'''
if root == 'api_root':
default_url = 'https://cp-par1.scaleway.com'
else:
default_url = 'https://api-marketplace.scaleway.com'
base_path = six.text_type(config.get_cloud_config_value(
root,
get_configured_provider(),
__opts__,
search_global=False,
default=default_url
))
path = '{0}/{1}/'.format(base_path, method)
if server_id:
path += '{0}/'.format(server_id)
if command:
path += command
if not isinstance(args, dict):
args = {}
token = config.get_cloud_config_value(
'token', get_configured_provider(), __opts__, search_global=False
)
data = salt.utils.json.dumps(args)
request = __utils__["http.query"](path,
method=http_method,
data=data,
status=True,
decode=True,
decode_type='json',
data_render=True,
data_renderer='json',
headers=True,
header_dict={'X-Auth-Token': token,
'User-Agent': "salt-cloud",
'Content-Type': 'application/json'})
if request['status'] > 299:
raise SaltCloudSystemExit(
'An error occurred while querying Scaleway. HTTP Code: {0} '
'Error: \'{1}\''.format(
request['status'],
request['error']
)
)
# success without data
if request['status'] == 204:
return True
return salt.utils.json.loads(request['body'])
|
def function[query, parameter[method, server_id, command, args, http_method, root]]:
constant[ Make a call to the Scaleway API.
]
if compare[name[root] equal[==] constant[api_root]] begin[:]
variable[default_url] assign[=] constant[https://cp-par1.scaleway.com]
variable[base_path] assign[=] call[name[six].text_type, parameter[call[name[config].get_cloud_config_value, parameter[name[root], call[name[get_configured_provider], parameter[]], name[__opts__]]]]]
variable[path] assign[=] call[constant[{0}/{1}/].format, parameter[name[base_path], name[method]]]
if name[server_id] begin[:]
<ast.AugAssign object at 0x7da18c4cdf00>
if name[command] begin[:]
<ast.AugAssign object at 0x7da18c4cf670>
if <ast.UnaryOp object at 0x7da18c4ce920> begin[:]
variable[args] assign[=] dictionary[[], []]
variable[token] assign[=] call[name[config].get_cloud_config_value, parameter[constant[token], call[name[get_configured_provider], parameter[]], name[__opts__]]]
variable[data] assign[=] call[name[salt].utils.json.dumps, parameter[name[args]]]
variable[request] assign[=] call[call[name[__utils__]][constant[http.query]], parameter[name[path]]]
if compare[call[name[request]][constant[status]] greater[>] constant[299]] begin[:]
<ast.Raise object at 0x7da18fe90040>
if compare[call[name[request]][constant[status]] equal[==] constant[204]] begin[:]
return[constant[True]]
return[call[name[salt].utils.json.loads, parameter[call[name[request]][constant[body]]]]]
|
keyword[def] identifier[query] ( identifier[method] = literal[string] , identifier[server_id] = keyword[None] , identifier[command] = keyword[None] , identifier[args] = keyword[None] ,
identifier[http_method] = literal[string] , identifier[root] = literal[string] ):
literal[string]
keyword[if] identifier[root] == literal[string] :
identifier[default_url] = literal[string]
keyword[else] :
identifier[default_url] = literal[string]
identifier[base_path] = identifier[six] . identifier[text_type] ( identifier[config] . identifier[get_cloud_config_value] (
identifier[root] ,
identifier[get_configured_provider] (),
identifier[__opts__] ,
identifier[search_global] = keyword[False] ,
identifier[default] = identifier[default_url]
))
identifier[path] = literal[string] . identifier[format] ( identifier[base_path] , identifier[method] )
keyword[if] identifier[server_id] :
identifier[path] += literal[string] . identifier[format] ( identifier[server_id] )
keyword[if] identifier[command] :
identifier[path] += identifier[command]
keyword[if] keyword[not] identifier[isinstance] ( identifier[args] , identifier[dict] ):
identifier[args] ={}
identifier[token] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] , identifier[search_global] = keyword[False]
)
identifier[data] = identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[args] )
identifier[request] = identifier[__utils__] [ literal[string] ]( identifier[path] ,
identifier[method] = identifier[http_method] ,
identifier[data] = identifier[data] ,
identifier[status] = keyword[True] ,
identifier[decode] = keyword[True] ,
identifier[decode_type] = literal[string] ,
identifier[data_render] = keyword[True] ,
identifier[data_renderer] = literal[string] ,
identifier[headers] = keyword[True] ,
identifier[header_dict] ={ literal[string] : identifier[token] ,
literal[string] : literal[string] ,
literal[string] : literal[string] })
keyword[if] identifier[request] [ literal[string] ]> literal[int] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string] . identifier[format] (
identifier[request] [ literal[string] ],
identifier[request] [ literal[string] ]
)
)
keyword[if] identifier[request] [ literal[string] ]== literal[int] :
keyword[return] keyword[True]
keyword[return] identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[request] [ literal[string] ])
|
def query(method='servers', server_id=None, command=None, args=None, http_method='GET', root='api_root'):
""" Make a call to the Scaleway API.
"""
if root == 'api_root':
default_url = 'https://cp-par1.scaleway.com' # depends on [control=['if'], data=[]]
else:
default_url = 'https://api-marketplace.scaleway.com'
base_path = six.text_type(config.get_cloud_config_value(root, get_configured_provider(), __opts__, search_global=False, default=default_url))
path = '{0}/{1}/'.format(base_path, method)
if server_id:
path += '{0}/'.format(server_id) # depends on [control=['if'], data=[]]
if command:
path += command # depends on [control=['if'], data=[]]
if not isinstance(args, dict):
args = {} # depends on [control=['if'], data=[]]
token = config.get_cloud_config_value('token', get_configured_provider(), __opts__, search_global=False)
data = salt.utils.json.dumps(args)
request = __utils__['http.query'](path, method=http_method, data=data, status=True, decode=True, decode_type='json', data_render=True, data_renderer='json', headers=True, header_dict={'X-Auth-Token': token, 'User-Agent': 'salt-cloud', 'Content-Type': 'application/json'})
if request['status'] > 299:
raise SaltCloudSystemExit("An error occurred while querying Scaleway. HTTP Code: {0} Error: '{1}'".format(request['status'], request['error'])) # depends on [control=['if'], data=[]]
# success without data
if request['status'] == 204:
return True # depends on [control=['if'], data=[]]
return salt.utils.json.loads(request['body'])
|
def _array_repeat(t, expr):
"""Is this really that useful?
Repeat an array like a Python list using modular arithmetic,
scalar subqueries, and PostgreSQL's ARRAY function.
This is inefficient if PostgreSQL allocates memory for the entire sequence
and the output column. A quick glance at PostgreSQL's C code shows the
sequence is evaluated stepwise, which suggests that it's roughly constant
memory for the sequence generation.
"""
raw, times = map(t.translate, expr.op().args)
# SQLAlchemy uses our column's table in the FROM clause. We need a simpler
# expression to workaround this.
array = sa.column(raw.name, type_=raw.type)
# We still need to prefix the table name to the column name in the final
# query, so make sure the column knows its origin
array.table = raw.table
array_length = _cardinality(array)
# sequence from 1 to the total number of elements desired in steps of 1.
# the call to greatest isn't necessary, but it provides clearer intent
# rather than depending on the implicit postgres generate_series behavior
start = step = 1
stop = sa.func.greatest(times, 0) * array_length
series = sa.func.generate_series(start, stop, step).alias()
series_column = sa.column(series.name, type_=sa.INTEGER)
# if our current index modulo the array's length
# is a multiple of the array's length, then the index is the array's length
index_expression = series_column % array_length
index = sa.func.coalesce(sa.func.nullif(index_expression, 0), array_length)
# tie it all together in a scalar subquery and collapse that into an ARRAY
selected = sa.select([array[index]]).select_from(series)
return sa.func.array(selected.as_scalar())
|
def function[_array_repeat, parameter[t, expr]]:
constant[Is this really that useful?
Repeat an array like a Python list using modular arithmetic,
scalar subqueries, and PostgreSQL's ARRAY function.
This is inefficient if PostgreSQL allocates memory for the entire sequence
and the output column. A quick glance at PostgreSQL's C code shows the
sequence is evaluated stepwise, which suggests that it's roughly constant
memory for the sequence generation.
]
<ast.Tuple object at 0x7da20e956c80> assign[=] call[name[map], parameter[name[t].translate, call[name[expr].op, parameter[]].args]]
variable[array] assign[=] call[name[sa].column, parameter[name[raw].name]]
name[array].table assign[=] name[raw].table
variable[array_length] assign[=] call[name[_cardinality], parameter[name[array]]]
variable[start] assign[=] constant[1]
variable[stop] assign[=] binary_operation[call[name[sa].func.greatest, parameter[name[times], constant[0]]] * name[array_length]]
variable[series] assign[=] call[call[name[sa].func.generate_series, parameter[name[start], name[stop], name[step]]].alias, parameter[]]
variable[series_column] assign[=] call[name[sa].column, parameter[name[series].name]]
variable[index_expression] assign[=] binary_operation[name[series_column] <ast.Mod object at 0x7da2590d6920> name[array_length]]
variable[index] assign[=] call[name[sa].func.coalesce, parameter[call[name[sa].func.nullif, parameter[name[index_expression], constant[0]]], name[array_length]]]
variable[selected] assign[=] call[call[name[sa].select, parameter[list[[<ast.Subscript object at 0x7da20e956710>]]]].select_from, parameter[name[series]]]
return[call[name[sa].func.array, parameter[call[name[selected].as_scalar, parameter[]]]]]
|
keyword[def] identifier[_array_repeat] ( identifier[t] , identifier[expr] ):
literal[string]
identifier[raw] , identifier[times] = identifier[map] ( identifier[t] . identifier[translate] , identifier[expr] . identifier[op] (). identifier[args] )
identifier[array] = identifier[sa] . identifier[column] ( identifier[raw] . identifier[name] , identifier[type_] = identifier[raw] . identifier[type] )
identifier[array] . identifier[table] = identifier[raw] . identifier[table]
identifier[array_length] = identifier[_cardinality] ( identifier[array] )
identifier[start] = identifier[step] = literal[int]
identifier[stop] = identifier[sa] . identifier[func] . identifier[greatest] ( identifier[times] , literal[int] )* identifier[array_length]
identifier[series] = identifier[sa] . identifier[func] . identifier[generate_series] ( identifier[start] , identifier[stop] , identifier[step] ). identifier[alias] ()
identifier[series_column] = identifier[sa] . identifier[column] ( identifier[series] . identifier[name] , identifier[type_] = identifier[sa] . identifier[INTEGER] )
identifier[index_expression] = identifier[series_column] % identifier[array_length]
identifier[index] = identifier[sa] . identifier[func] . identifier[coalesce] ( identifier[sa] . identifier[func] . identifier[nullif] ( identifier[index_expression] , literal[int] ), identifier[array_length] )
identifier[selected] = identifier[sa] . identifier[select] ([ identifier[array] [ identifier[index] ]]). identifier[select_from] ( identifier[series] )
keyword[return] identifier[sa] . identifier[func] . identifier[array] ( identifier[selected] . identifier[as_scalar] ())
|
def _array_repeat(t, expr):
"""Is this really that useful?
Repeat an array like a Python list using modular arithmetic,
scalar subqueries, and PostgreSQL's ARRAY function.
This is inefficient if PostgreSQL allocates memory for the entire sequence
and the output column. A quick glance at PostgreSQL's C code shows the
sequence is evaluated stepwise, which suggests that it's roughly constant
memory for the sequence generation.
"""
(raw, times) = map(t.translate, expr.op().args)
# SQLAlchemy uses our column's table in the FROM clause. We need a simpler
# expression to workaround this.
array = sa.column(raw.name, type_=raw.type)
# We still need to prefix the table name to the column name in the final
# query, so make sure the column knows its origin
array.table = raw.table
array_length = _cardinality(array)
# sequence from 1 to the total number of elements desired in steps of 1.
# the call to greatest isn't necessary, but it provides clearer intent
# rather than depending on the implicit postgres generate_series behavior
start = step = 1
stop = sa.func.greatest(times, 0) * array_length
series = sa.func.generate_series(start, stop, step).alias()
series_column = sa.column(series.name, type_=sa.INTEGER)
# if our current index modulo the array's length
# is a multiple of the array's length, then the index is the array's length
index_expression = series_column % array_length
index = sa.func.coalesce(sa.func.nullif(index_expression, 0), array_length)
# tie it all together in a scalar subquery and collapse that into an ARRAY
selected = sa.select([array[index]]).select_from(series)
return sa.func.array(selected.as_scalar())
|
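The coalesce(nullif(i % n, 0), n) expression maps the 1-based series index onto 1..n cyclically; a plain i % n would yield 0 instead of n at each multiple of n. The same arithmetic in plain Python, as a sketch of what the generated SQL computes:

```python
def repeat_array(xs, times):
    # mirrors generate_series(1, greatest(times, 0) * n) indexed by
    # coalesce(nullif(i % n, 0), n), shifted to Python's 0-based lists
    n = len(xs)
    return [xs[(i % n or n) - 1] for i in range(1, max(times, 0) * n + 1)]

assert repeat_array([1, 2, 3], 2) == [1, 2, 3, 1, 2, 3]
assert repeat_array([1, 2, 3], 0) == []
```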
def predict(self, parsed_json):
"""
Parameters
----------
parsed_json : dict
with keys 'data' and 'id', where 'data' contains a recording and
'id' is the id on write-math.com for debugging purposes
"""
evaluate = utils.evaluate_model_single_recording_preloaded
results = evaluate(self.preprocessing_queue,
self.feature_list,
self.model,
self.output_semantics,
json.dumps(parsed_json['data']),
parsed_json['id'])
return results
|
def function[predict, parameter[self, parsed_json]]:
constant[
Parameters
----------
parsed_json : dict
with keys 'data' and 'id', where 'data' contains a recording and
'id' is the id on write-math.com for debugging purposes
]
variable[evaluate] assign[=] name[utils].evaluate_model_single_recording_preloaded
variable[results] assign[=] call[name[evaluate], parameter[name[self].preprocessing_queue, name[self].feature_list, name[self].model, name[self].output_semantics, call[name[json].dumps, parameter[call[name[parsed_json]][constant[data]]]], call[name[parsed_json]][constant[id]]]]
return[name[results]]
|
keyword[def] identifier[predict] ( identifier[self] , identifier[parsed_json] ):
literal[string]
identifier[evaluate] = identifier[utils] . identifier[evaluate_model_single_recording_preloaded]
identifier[results] = identifier[evaluate] ( identifier[self] . identifier[preprocessing_queue] ,
identifier[self] . identifier[feature_list] ,
identifier[self] . identifier[model] ,
identifier[self] . identifier[output_semantics] ,
identifier[json] . identifier[dumps] ( identifier[parsed_json] [ literal[string] ]),
identifier[parsed_json] [ literal[string] ])
keyword[return] identifier[results]
|
def predict(self, parsed_json):
"""
Parameters
----------
parsed_json : dict
with keys 'data' and 'id', where 'data' contains a recording and
'id' is the id on write-math.com for debugging purposes
"""
evaluate = utils.evaluate_model_single_recording_preloaded
results = evaluate(self.preprocessing_queue, self.feature_list, self.model, self.output_semantics, json.dumps(parsed_json['data']), parsed_json['id'])
return results
|
def create_from_euler_angles(cls, rx, ry, rz, degrees=False):
""" Classmethod to create a quaternion given the euler angles.
"""
if degrees:
rx, ry, rz = np.radians([rx, ry, rz])
# Obtain quaternions
qx = Quaternion(np.cos(rx/2), 0, 0, np.sin(rx/2))
qy = Quaternion(np.cos(ry/2), 0, np.sin(ry/2), 0)
qz = Quaternion(np.cos(rz/2), np.sin(rz/2), 0, 0)
# Almost done
return qx*qy*qz
|
def function[create_from_euler_angles, parameter[cls, rx, ry, rz, degrees]]:
constant[ Classmethod to create a quaternion given the euler angles.
]
if name[degrees] begin[:]
<ast.Tuple object at 0x7da18c4cfbb0> assign[=] call[name[np].radians, parameter[list[[<ast.Name object at 0x7da18c4cf430>, <ast.Name object at 0x7da18c4cf5b0>, <ast.Name object at 0x7da18c4cd360>]]]]
variable[qx] assign[=] call[name[Quaternion], parameter[call[name[np].cos, parameter[binary_operation[name[rx] / constant[2]]]], constant[0], constant[0], call[name[np].sin, parameter[binary_operation[name[rx] / constant[2]]]]]]
variable[qy] assign[=] call[name[Quaternion], parameter[call[name[np].cos, parameter[binary_operation[name[ry] / constant[2]]]], constant[0], call[name[np].sin, parameter[binary_operation[name[ry] / constant[2]]]], constant[0]]]
variable[qz] assign[=] call[name[Quaternion], parameter[call[name[np].cos, parameter[binary_operation[name[rz] / constant[2]]]], call[name[np].sin, parameter[binary_operation[name[rz] / constant[2]]]], constant[0], constant[0]]]
return[binary_operation[binary_operation[name[qx] * name[qy]] * name[qz]]]
|
keyword[def] identifier[create_from_euler_angles] ( identifier[cls] , identifier[rx] , identifier[ry] , identifier[rz] , identifier[degrees] = keyword[False] ):
literal[string]
keyword[if] identifier[degrees] :
identifier[rx] , identifier[ry] , identifier[rz] = identifier[np] . identifier[radians] ([ identifier[rx] , identifier[ry] , identifier[rz] ])
identifier[qx] = identifier[Quaternion] ( identifier[np] . identifier[cos] ( identifier[rx] / literal[int] ), literal[int] , literal[int] , identifier[np] . identifier[sin] ( identifier[rx] / literal[int] ))
identifier[qy] = identifier[Quaternion] ( identifier[np] . identifier[cos] ( identifier[ry] / literal[int] ), literal[int] , identifier[np] . identifier[sin] ( identifier[ry] / literal[int] ), literal[int] )
identifier[qz] = identifier[Quaternion] ( identifier[np] . identifier[cos] ( identifier[rz] / literal[int] ), identifier[np] . identifier[sin] ( identifier[rz] / literal[int] ), literal[int] , literal[int] )
keyword[return] identifier[qx] * identifier[qy] * identifier[qz]
|
def create_from_euler_angles(cls, rx, ry, rz, degrees=False):
""" Classmethod to create a quaternion given the euler angles.
"""
if degrees:
(rx, ry, rz) = np.radians([rx, ry, rz]) # depends on [control=['if'], data=[]]
# Obtain quaternions
qx = Quaternion(np.cos(rx / 2), 0, 0, np.sin(rx / 2))
qy = Quaternion(np.cos(ry / 2), 0, np.sin(ry / 2), 0)
qz = Quaternion(np.cos(rz / 2), np.sin(rz / 2), 0, 0)
# Almost done
return qx * qy * qz
|
def from_bytearray(self, stream):
"""
Constructs this frame from input data stream, consuming as many bytes as necessary from
the beginning of the stream.
If stream does not contain enough data to construct a complete modbus frame, an EOFError
is raised and no data is consumed.
:param stream: bytearray to consume data from to construct this frame.
:except EOFError: Not enough data for complete frame; no data consumed.
"""
fmt = '>HHHBB'
size_header = struct.calcsize(fmt)
if len(stream) < size_header:
raise EOFError
(
self.transaction_id,
self.protocol_id,
self.length,
self.unit_id,
self.fcode
) = struct.unpack(fmt, bytes(stream[:size_header]))
size_total = size_header + self.length - 2
if len(stream) < size_total:
raise EOFError
self.data = stream[size_header:size_total]
del stream[:size_total]
|
def function[from_bytearray, parameter[self, stream]]:
constant[
Constructs this frame from input data stream, consuming as many bytes as necessary from
the beginning of the stream.
If stream does not contain enough data to construct a complete modbus frame, an EOFError
is raised and no data is consumed.
:param stream: bytearray to consume data from to construct this frame.
:except EOFError: Not enough data for complete frame; no data consumed.
]
variable[fmt] assign[=] constant[>HHHBB]
variable[size_header] assign[=] call[name[struct].calcsize, parameter[name[fmt]]]
if compare[call[name[len], parameter[name[stream]]] less[<] name[size_header]] begin[:]
<ast.Raise object at 0x7da1b13b6080>
<ast.Tuple object at 0x7da1b13b5c60> assign[=] call[name[struct].unpack, parameter[name[fmt], call[name[bytes], parameter[call[name[stream]][<ast.Slice object at 0x7da1b13b7850>]]]]]
variable[size_total] assign[=] binary_operation[binary_operation[name[size_header] + name[self].length] - constant[2]]
if compare[call[name[len], parameter[name[stream]]] less[<] name[size_total]] begin[:]
<ast.Raise object at 0x7da1b13b6050>
name[self].data assign[=] call[name[stream]][<ast.Slice object at 0x7da1b13b4d60>]
<ast.Delete object at 0x7da1b13b6dd0>
|
keyword[def] identifier[from_bytearray] ( identifier[self] , identifier[stream] ):
literal[string]
identifier[fmt] = literal[string]
identifier[size_header] = identifier[struct] . identifier[calcsize] ( identifier[fmt] )
keyword[if] identifier[len] ( identifier[stream] )< identifier[size_header] :
keyword[raise] identifier[EOFError]
(
identifier[self] . identifier[transaction_id] ,
identifier[self] . identifier[protocol_id] ,
identifier[self] . identifier[length] ,
identifier[self] . identifier[unit_id] ,
identifier[self] . identifier[fcode]
)= identifier[struct] . identifier[unpack] ( identifier[fmt] , identifier[bytes] ( identifier[stream] [: identifier[size_header] ]))
identifier[size_total] = identifier[size_header] + identifier[self] . identifier[length] - literal[int]
keyword[if] identifier[len] ( identifier[stream] )< identifier[size_total] :
keyword[raise] identifier[EOFError]
identifier[self] . identifier[data] = identifier[stream] [ identifier[size_header] : identifier[size_total] ]
keyword[del] identifier[stream] [: identifier[size_total] ]
|
def from_bytearray(self, stream):
"""
Constructs this frame from input data stream, consuming as many bytes as necessary from
the beginning of the stream.
If stream does not contain enough data to construct a complete modbus frame, an EOFError
is raised and no data is consumed.
:param stream: bytearray to consume data from to construct this frame.
:except EOFError: Not enough data for complete frame; no data consumed.
"""
fmt = '>HHHBB'
size_header = struct.calcsize(fmt)
if len(stream) < size_header:
raise EOFError # depends on [control=['if'], data=[]]
(self.transaction_id, self.protocol_id, self.length, self.unit_id, self.fcode) = struct.unpack(fmt, bytes(stream[:size_header]))
size_total = size_header + self.length - 2
if len(stream) < size_total:
raise EOFError # depends on [control=['if'], data=[]]
self.data = stream[size_header:size_total]
del stream[:size_total]
|
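A sketch of the Modbus/TCP wire layout the parser above expects; the payload bytes are placeholders. The length field counts unit id, function code and data, which is why the parser subtracts 2 after the 8-byte MBAP header.

import struct

fmt = '>HHHBB'                    # transaction, protocol, length, unit id, fcode
data = b'\x00\x10\x00\x02'        # placeholder function-specific payload
length = 2 + len(data)            # unit id + fcode + data, per Modbus/TCP
stream = bytearray(struct.pack(fmt, 1, 0, length, 0x11, 0x03) + data)

size_header = struct.calcsize(fmt)        # 8 bytes
size_total = size_header + length - 2     # bytes one complete frame consumes
assert len(stream) == size_total
assert stream[size_header:size_total] == data
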
def _set_ip_vrrp_extended(self, v, load=False):
"""
Setter method for ip_vrrp_extended, mapped from YANG variable /routing_system/interface/ve/ip/ip_vrrp_extended (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_vrrp_extended is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_vrrp_extended() directly.
YANG Description: VRRP Extended
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ip_vrrp_extended.ip_vrrp_extended, is_container='container', presence=False, yang_name="ip-vrrp-extended", rest_name="vrrp-extended", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'VRRP Extended', u'cli-sequence-commands': None, u'alt-name': u'vrrp-extended', u'callpoint': u'VRRPEIpMd5Auth'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip_vrrp_extended must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ip_vrrp_extended.ip_vrrp_extended, is_container='container', presence=False, yang_name="ip-vrrp-extended", rest_name="vrrp-extended", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'VRRP Extended', u'cli-sequence-commands': None, u'alt-name': u'vrrp-extended', u'callpoint': u'VRRPEIpMd5Auth'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True)""",
})
self.__ip_vrrp_extended = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_ip_vrrp_extended, parameter[self, v, load]]:
constant[
Setter method for ip_vrrp_extended, mapped from YANG variable /routing_system/interface/ve/ip/ip_vrrp_extended (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_vrrp_extended is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_vrrp_extended() directly.
YANG Description: VRRP Extended
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2054a7010>
name[self].__ip_vrrp_extended assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_ip_vrrp_extended] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[ip_vrrp_extended] . identifier[ip_vrrp_extended] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__ip_vrrp_extended] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_ip_vrrp_extended(self, v, load=False):
"""
Setter method for ip_vrrp_extended, mapped from YANG variable /routing_system/interface/ve/ip/ip_vrrp_extended (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_vrrp_extended is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_vrrp_extended() directly.
YANG Description: VRRP Extended
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=ip_vrrp_extended.ip_vrrp_extended, is_container='container', presence=False, yang_name='ip-vrrp-extended', rest_name='vrrp-extended', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'VRRP Extended', u'cli-sequence-commands': None, u'alt-name': u'vrrp-extended', u'callpoint': u'VRRPEIpMd5Auth'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'ip_vrrp_extended must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=ip_vrrp_extended.ip_vrrp_extended, is_container=\'container\', presence=False, yang_name="ip-vrrp-extended", rest_name="vrrp-extended", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-compact-syntax\': None, u\'info\': u\'VRRP Extended\', u\'cli-sequence-commands\': None, u\'alt-name\': u\'vrrp-extended\', u\'callpoint\': u\'VRRPEIpMd5Auth\'}}, namespace=\'urn:brocade.com:mgmt:brocade-vrrp\', defining_module=\'brocade-vrrp\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__ip_vrrp_extended = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def top_commenters(self, num):
"""Return a markdown representation of the top commenters."""
num = min(num, len(self.commenters))
if num <= 0:
return ''
top_commenters = sorted(
iteritems(self.commenters),
key=lambda x: (-sum(y.score for y in x[1]),
-len(x[1]), str(x[0])))[:num]
retval = self.post_header.format('Top Commenters')
for author, comments in top_commenters:
retval += '1. {} ({}, {} comment{})\n'.format(
self._user(author),
self._points(sum(x.score for x in comments)),
len(comments), 's' if len(comments) != 1 else '')
return '{}\n'.format(retval)
|
def function[top_commenters, parameter[self, num]]:
constant[Return a markdown representation of the top commenters.]
variable[num] assign[=] call[name[min], parameter[name[num], call[name[len], parameter[name[self].commenters]]]]
if compare[name[num] less_or_equal[<=] constant[0]] begin[:]
return[constant[]]
variable[top_commenters] assign[=] call[call[name[sorted], parameter[call[name[iteritems], parameter[name[self].commenters]]]]][<ast.Slice object at 0x7da20c7ca350>]
variable[retval] assign[=] call[name[self].post_header.format, parameter[constant[Top Commenters]]]
for taget[tuple[[<ast.Name object at 0x7da20c7c9240>, <ast.Name object at 0x7da20c7c9e70>]]] in starred[name[top_commenters]] begin[:]
<ast.AugAssign object at 0x7da20c7c8100>
return[call[constant[{}
].format, parameter[name[retval]]]]
|
keyword[def] identifier[top_commenters] ( identifier[self] , identifier[num] ):
literal[string]
identifier[num] = identifier[min] ( identifier[num] , identifier[len] ( identifier[self] . identifier[commenters] ))
keyword[if] identifier[num] <= literal[int] :
keyword[return] literal[string]
identifier[top_commenters] = identifier[sorted] (
identifier[iteritems] ( identifier[self] . identifier[commenters] ),
identifier[key] = keyword[lambda] identifier[x] :(- identifier[sum] ( identifier[y] . identifier[score] keyword[for] identifier[y] keyword[in] identifier[x] [ literal[int] ]),
- identifier[len] ( identifier[x] [ literal[int] ]), identifier[str] ( identifier[x] [ literal[int] ])))[: identifier[num] ]
identifier[retval] = identifier[self] . identifier[post_header] . identifier[format] ( literal[string] )
keyword[for] identifier[author] , identifier[comments] keyword[in] identifier[top_commenters] :
identifier[retval] += literal[string] . identifier[format] (
identifier[self] . identifier[_user] ( identifier[author] ),
identifier[self] . identifier[_points] ( identifier[sum] ( identifier[x] . identifier[score] keyword[for] identifier[x] keyword[in] identifier[comments] )),
identifier[len] ( identifier[comments] ), literal[string] keyword[if] identifier[len] ( identifier[comments] )!= literal[int] keyword[else] literal[string] )
keyword[return] literal[string] . identifier[format] ( identifier[retval] )
|
def top_commenters(self, num):
"""Return a markdown representation of the top commenters."""
num = min(num, len(self.commenters))
if num <= 0:
return '' # depends on [control=['if'], data=[]]
top_commenters = sorted(iteritems(self.commenters), key=lambda x: (-sum((y.score for y in x[1])), -len(x[1]), str(x[0])))[:num]
retval = self.post_header.format('Top Commenters')
for (author, comments) in top_commenters:
retval += '1. {} ({}, {} comment{})\n'.format(self._user(author), self._points(sum((x.score for x in comments))), len(comments), 's' if len(comments) != 1 else '') # depends on [control=['for'], data=[]]
return '{}\n'.format(retval)
|
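A small sketch of the ranking key above: total score descending, then comment count descending, then author name. Comment and the sample data are made up, and dict.items() stands in for the Python 2 iteritems().

from collections import namedtuple

Comment = namedtuple('Comment', 'score')
commenters = {'alice': [Comment(5), Comment(1)],
              'bob': [Comment(6)],
              'carol': [Comment(4), Comment(2)]}
ranked = sorted(commenters.items(),
                key=lambda x: (-sum(c.score for c in x[1]),
                               -len(x[1]), str(x[0])))
print([name for name, _ in ranked])  # ['alice', 'carol', 'bob']

All three totals are 6 here, so the comment-count and name tiebreaks decide the order.
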
def decode(self, ids):
"""Decodes a list of integers into text."""
ids = text_encoder.pad_decr(ids)
subword_ids = ids
del ids
subwords = []
# Some ids correspond to bytes. Because unicode characters are composed of
# possibly multiple bytes, we attempt to decode contiguous lists of bytes
# all together. Invalid byte sequences are replaced with the unicode
# replacement (i.e. unknown) character U+FFFD.
prev_bytes = []
def consume_prev_bytes():
if prev_bytes:
bytestr = b"".join(prev_bytes)
bytes_text = bytestr.decode("utf-8", "replace")
subwords.append(bytes_text)
return []
for subword_id in subword_ids:
subword = self._id_to_subword(subword_id)
if isinstance(subword, six.binary_type):
# Byte-encoded
prev_bytes.append(subword)
else:
# If there were bytes previously, convert to unicode.
prev_bytes = consume_prev_bytes()
trimmed, add_space = _trim_underscore_and_tell(subword)
subwords.append(trimmed)
if add_space:
subwords.append(" ")
# If there were trailing bytes, convert to unicode.
prev_bytes = consume_prev_bytes()
return tf.compat.as_text("".join(subwords))
|
def function[decode, parameter[self, ids]]:
constant[Decodes a list of integers into text.]
variable[ids] assign[=] call[name[text_encoder].pad_decr, parameter[name[ids]]]
variable[subword_ids] assign[=] name[ids]
<ast.Delete object at 0x7da1b20125f0>
variable[subwords] assign[=] list[[]]
variable[prev_bytes] assign[=] list[[]]
def function[consume_prev_bytes, parameter[]]:
if name[prev_bytes] begin[:]
variable[bytestr] assign[=] call[constant[b''].join, parameter[name[prev_bytes]]]
variable[bytes_text] assign[=] call[name[bytestr].decode, parameter[constant[utf-8], constant[replace]]]
call[name[subwords].append, parameter[name[bytes_text]]]
return[list[[]]]
for taget[name[subword_id]] in starred[name[subword_ids]] begin[:]
variable[subword] assign[=] call[name[self]._id_to_subword, parameter[name[subword_id]]]
if call[name[isinstance], parameter[name[subword], name[six].binary_type]] begin[:]
call[name[prev_bytes].append, parameter[name[subword]]]
variable[prev_bytes] assign[=] call[name[consume_prev_bytes], parameter[]]
return[call[name[tf].compat.as_text, parameter[call[constant[].join, parameter[name[subwords]]]]]]
|
keyword[def] identifier[decode] ( identifier[self] , identifier[ids] ):
literal[string]
identifier[ids] = identifier[text_encoder] . identifier[pad_decr] ( identifier[ids] )
identifier[subword_ids] = identifier[ids]
keyword[del] identifier[ids]
identifier[subwords] =[]
identifier[prev_bytes] =[]
keyword[def] identifier[consume_prev_bytes] ():
keyword[if] identifier[prev_bytes] :
identifier[bytestr] = literal[string] . identifier[join] ( identifier[prev_bytes] )
identifier[bytes_text] = identifier[bytestr] . identifier[decode] ( literal[string] , literal[string] )
identifier[subwords] . identifier[append] ( identifier[bytes_text] )
keyword[return] []
keyword[for] identifier[subword_id] keyword[in] identifier[subword_ids] :
identifier[subword] = identifier[self] . identifier[_id_to_subword] ( identifier[subword_id] )
keyword[if] identifier[isinstance] ( identifier[subword] , identifier[six] . identifier[binary_type] ):
identifier[prev_bytes] . identifier[append] ( identifier[subword] )
keyword[else] :
identifier[prev_bytes] = identifier[consume_prev_bytes] ()
identifier[trimmed] , identifier[add_space] = identifier[_trim_underscore_and_tell] ( identifier[subword] )
identifier[subwords] . identifier[append] ( identifier[trimmed] )
keyword[if] identifier[add_space] :
identifier[subwords] . identifier[append] ( literal[string] )
identifier[prev_bytes] = identifier[consume_prev_bytes] ()
keyword[return] identifier[tf] . identifier[compat] . identifier[as_text] ( literal[string] . identifier[join] ( identifier[subwords] ))
|
def decode(self, ids):
"""Decodes a list of integers into text."""
ids = text_encoder.pad_decr(ids)
subword_ids = ids
del ids
subwords = []
# Some ids correspond to bytes. Because unicode characters are composed of
# possibly multiple bytes, we attempt to decode contiguous lists of bytes
# all together. Invalid byte sequences are replaced with the unicode
# replacement (i.e. unknown) character U+FFFD.
prev_bytes = []
def consume_prev_bytes():
if prev_bytes:
bytestr = b''.join(prev_bytes)
bytes_text = bytestr.decode('utf-8', 'replace')
subwords.append(bytes_text) # depends on [control=['if'], data=[]]
return []
for subword_id in subword_ids:
subword = self._id_to_subword(subword_id)
if isinstance(subword, six.binary_type):
# Byte-encoded
prev_bytes.append(subword) # depends on [control=['if'], data=[]]
else:
# If there were bytes previously, convert to unicode.
prev_bytes = consume_prev_bytes()
(trimmed, add_space) = _trim_underscore_and_tell(subword)
subwords.append(trimmed)
if add_space:
subwords.append(' ') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['subword_id']]
# If there were trailing bytes, convert to unicode.
prev_bytes = consume_prev_bytes()
return tf.compat.as_text(''.join(subwords))
|
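A sketch of the byte-grouping idea in decode() above: contiguous byte-valued subwords are joined and decoded together so multi-byte UTF-8 characters survive; the subword list is invented for illustration.

subwords = [b'\xe2', b'\x82', b'\xac', '1', '.', '99']  # three bytes spell '€'
out, prev_bytes = [], []
for sw in subwords:
    if isinstance(sw, bytes):
        prev_bytes.append(sw)           # buffer bytes until a text subword
        continue
    if prev_bytes:
        out.append(b''.join(prev_bytes).decode('utf-8', 'replace'))
        prev_bytes = []
    out.append(sw)
if prev_bytes:                          # flush any trailing bytes
    out.append(b''.join(prev_bytes).decode('utf-8', 'replace'))
print(''.join(out))                     # €1.99
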
def _get_relative_ext(of, sf):
"""Retrieve relative extension given the original and secondary files.
"""
def half_finished_trim(orig, prefix):
return (os.path.basename(prefix).count(".") > 0 and
os.path.basename(orig).count(".") == os.path.basename(prefix).count("."))
# Handle remote files
if of.find(":") > 0:
of = os.path.basename(of.split(":")[-1])
if sf.find(":") > 0:
sf = os.path.basename(sf.split(":")[-1])
prefix = os.path.commonprefix([sf, of])
while prefix.endswith(".") or (half_finished_trim(sf, prefix) and half_finished_trim(of, prefix)):
prefix = prefix[:-1]
exts_to_remove = of.replace(prefix, "")
ext_to_add = sf.replace(prefix, "")
# Return extensions relative to original
if not exts_to_remove or exts_to_remove.startswith("."):
return str("^" * exts_to_remove.count(".") + ext_to_add)
else:
raise ValueError("No cross platform way to reference complex extension: %s %s" % (sf, of))
|
def function[_get_relative_ext, parameter[of, sf]]:
constant[Retrieve relative extension given the original and secondary files.
]
def function[half_finished_trim, parameter[orig, prefix]]:
return[<ast.BoolOp object at 0x7da1b18ffd60>]
if compare[call[name[of].find, parameter[constant[:]]] greater[>] constant[0]] begin[:]
variable[of] assign[=] call[name[os].path.basename, parameter[call[call[name[of].split, parameter[constant[:]]]][<ast.UnaryOp object at 0x7da1b18ff490>]]]
if compare[call[name[sf].find, parameter[constant[:]]] greater[>] constant[0]] begin[:]
variable[sf] assign[=] call[name[os].path.basename, parameter[call[call[name[sf].split, parameter[constant[:]]]][<ast.UnaryOp object at 0x7da1b18d18d0>]]]
variable[prefix] assign[=] call[name[os].path.commonprefix, parameter[list[[<ast.Name object at 0x7da1b18d2920>, <ast.Name object at 0x7da1b18d1d80>]]]]
while <ast.BoolOp object at 0x7da1b18d2e30> begin[:]
variable[prefix] assign[=] call[name[prefix]][<ast.Slice object at 0x7da1b18d3100>]
variable[exts_to_remove] assign[=] call[name[of].replace, parameter[name[prefix], constant[]]]
variable[ext_to_add] assign[=] call[name[sf].replace, parameter[name[prefix], constant[]]]
if <ast.BoolOp object at 0x7da1b18d2dd0> begin[:]
return[call[name[str], parameter[binary_operation[binary_operation[constant[^] * call[name[exts_to_remove].count, parameter[constant[.]]]] + name[ext_to_add]]]]]
|
keyword[def] identifier[_get_relative_ext] ( identifier[of] , identifier[sf] ):
literal[string]
keyword[def] identifier[half_finished_trim] ( identifier[orig] , identifier[prefix] ):
keyword[return] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[prefix] ). identifier[count] ( literal[string] )> literal[int] keyword[and]
identifier[os] . identifier[path] . identifier[basename] ( identifier[orig] ). identifier[count] ( literal[string] )== identifier[os] . identifier[path] . identifier[basename] ( identifier[prefix] ). identifier[count] ( literal[string] ))
keyword[if] identifier[of] . identifier[find] ( literal[string] )> literal[int] :
identifier[of] = identifier[os] . identifier[path] . identifier[basename] ( identifier[of] . identifier[split] ( literal[string] )[- literal[int] ])
keyword[if] identifier[sf] . identifier[find] ( literal[string] )> literal[int] :
identifier[sf] = identifier[os] . identifier[path] . identifier[basename] ( identifier[sf] . identifier[split] ( literal[string] )[- literal[int] ])
identifier[prefix] = identifier[os] . identifier[path] . identifier[commonprefix] ([ identifier[sf] , identifier[of] ])
keyword[while] identifier[prefix] . identifier[endswith] ( literal[string] ) keyword[or] ( identifier[half_finished_trim] ( identifier[sf] , identifier[prefix] ) keyword[and] identifier[half_finished_trim] ( identifier[of] , identifier[prefix] )):
identifier[prefix] = identifier[prefix] [:- literal[int] ]
identifier[exts_to_remove] = identifier[of] . identifier[replace] ( identifier[prefix] , literal[string] )
identifier[ext_to_add] = identifier[sf] . identifier[replace] ( identifier[prefix] , literal[string] )
keyword[if] keyword[not] identifier[exts_to_remove] keyword[or] identifier[exts_to_remove] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[str] ( literal[string] * identifier[exts_to_remove] . identifier[count] ( literal[string] )+ identifier[ext_to_add] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[sf] , identifier[of] ))
|
def _get_relative_ext(of, sf):
"""Retrieve relative extension given the original and secondary files.
"""
def half_finished_trim(orig, prefix):
return os.path.basename(prefix).count('.') > 0 and os.path.basename(orig).count('.') == os.path.basename(prefix).count('.')
# Handle remote files
if of.find(':') > 0:
of = os.path.basename(of.split(':')[-1]) # depends on [control=['if'], data=[]]
if sf.find(':') > 0:
sf = os.path.basename(sf.split(':')[-1]) # depends on [control=['if'], data=[]]
prefix = os.path.commonprefix([sf, of])
while prefix.endswith('.') or (half_finished_trim(sf, prefix) and half_finished_trim(of, prefix)):
prefix = prefix[:-1] # depends on [control=['while'], data=[]]
exts_to_remove = of.replace(prefix, '')
ext_to_add = sf.replace(prefix, '')
# Return extensions relative to original
if not exts_to_remove or exts_to_remove.startswith('.'):
return str('^' * exts_to_remove.count('.') + ext_to_add) # depends on [control=['if'], data=[]]
else:
raise ValueError('No cross platform way to reference complex extension: %s %s' % (sf, of))
|
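Two worked cases for the caret encoding above, assuming _get_relative_ext and os are in scope: each '^' strips one extension from the original name before the new suffix is appended.

print(_get_relative_ext('sample.bam', 'sample.bam.bai'))  # '.bai'  (append only)
print(_get_relative_ext('sample.bam', 'sample.vcf'))      # '^.vcf' (strip .bam, add .vcf)
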
def get_queryset(self):
"""
Returns queryset instance.
:rtype: django.db.models.query.QuerySet.
"""
queryset = super(IndexView, self).get_queryset()
search_form = self.get_search_form()
if search_form.is_valid():
query_str = search_form.cleaned_data.get('q', '').strip()
queryset = self.model.objects.search(query_str)
return queryset
|
def function[get_queryset, parameter[self]]:
constant[
Returns queryset instance.
:rtype: django.db.models.query.QuerySet.
]
variable[queryset] assign[=] call[call[name[super], parameter[name[IndexView], name[self]]].get_queryset, parameter[]]
variable[search_form] assign[=] call[name[self].get_search_form, parameter[]]
if call[name[search_form].is_valid, parameter[]] begin[:]
variable[query_str] assign[=] call[call[name[search_form].cleaned_data.get, parameter[constant[q], constant[]]].strip, parameter[]]
variable[queryset] assign[=] call[name[self].model.objects.search, parameter[name[query_str]]]
return[name[queryset]]
|
keyword[def] identifier[get_queryset] ( identifier[self] ):
literal[string]
identifier[queryset] = identifier[super] ( identifier[IndexView] , identifier[self] ). identifier[get_queryset] ()
identifier[search_form] = identifier[self] . identifier[get_search_form] ()
keyword[if] identifier[search_form] . identifier[is_valid] ():
identifier[query_str] = identifier[search_form] . identifier[cleaned_data] . identifier[get] ( literal[string] , literal[string] ). identifier[strip] ()
identifier[queryset] = identifier[self] . identifier[model] . identifier[objects] . identifier[search] ( identifier[query_str] )
keyword[return] identifier[queryset]
|
def get_queryset(self):
"""
Returns queryset instance.
:rtype: django.db.models.query.QuerySet.
"""
queryset = super(IndexView, self).get_queryset()
search_form = self.get_search_form()
if search_form.is_valid():
query_str = search_form.cleaned_data.get('q', '').strip()
queryset = self.model.objects.search(query_str) # depends on [control=['if'], data=[]]
return queryset
|
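The view above assumes the model's default manager exposes a search() method. A plausible minimal manager, shown purely as an assumption (the real model and field names are unknown):

from django.db import models
from django.db.models import Q

class ArticleManager(models.Manager):
    def search(self, query_str):
        qs = self.get_queryset()
        if not query_str:
            return qs
        # Hypothetical fields; substitute whatever the real model defines.
        return qs.filter(Q(title__icontains=query_str) |
                         Q(body__icontains=query_str))
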
def container_name(self):
"""
        The container_name is the concatenation of ``image_name`` and a uuid1 string.
        We also remove the URL portion of the ``image_name`` before using it.
"""
if getattr(self, "_container_name", NotSpecified) is NotSpecified:
self.container_name = "{0}-{1}".format(self.image_name.replace("/", "--").replace(":", "---"), str(uuid.uuid1()).lower())
return self._container_name
|
def function[container_name, parameter[self]]:
constant[
    The container_name is the concatenation of ``image_name`` and a uuid1 string.
    We also remove the URL portion of the ``image_name`` before using it.
]
if compare[call[name[getattr], parameter[name[self], constant[_container_name], name[NotSpecified]]] is name[NotSpecified]] begin[:]
name[self].container_name assign[=] call[constant[{0}-{1}].format, parameter[call[call[name[self].image_name.replace, parameter[constant[/], constant[--]]].replace, parameter[constant[:], constant[---]]], call[call[name[str], parameter[call[name[uuid].uuid1, parameter[]]]].lower, parameter[]]]]
return[name[self]._container_name]
|
keyword[def] identifier[container_name] ( identifier[self] ):
literal[string]
keyword[if] identifier[getattr] ( identifier[self] , literal[string] , identifier[NotSpecified] ) keyword[is] identifier[NotSpecified] :
identifier[self] . identifier[container_name] = literal[string] . identifier[format] ( identifier[self] . identifier[image_name] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ), identifier[str] ( identifier[uuid] . identifier[uuid1] ()). identifier[lower] ())
keyword[return] identifier[self] . identifier[_container_name]
|
def container_name(self):
"""
    The container_name is the concatenation of ``image_name`` and a uuid1 string.
    We also remove the URL portion of the ``image_name`` before using it.
"""
if getattr(self, '_container_name', NotSpecified) is NotSpecified:
self.container_name = '{0}-{1}'.format(self.image_name.replace('/', '--').replace(':', '---'), str(uuid.uuid1()).lower()) # depends on [control=['if'], data=[]]
return self._container_name
|
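An illustration of the naming scheme above with a made-up image name: path separators and tag colons are rewritten so the result is a valid container name, then a lowercase uuid1 suffix is appended.

import uuid

image_name = 'registry.example.com/team/app:latest'
container = '{0}-{1}'.format(image_name.replace('/', '--').replace(':', '---'),
                             str(uuid.uuid1()).lower())
print(container)  # registry.example.com--team--app---latest-<uuid1>
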
def _get_action_profile(x, indptr):
"""
Obtain a tuple of mixed actions from a flattened action profile.
Parameters
----------
x : array_like(float, ndim=1)
Array of flattened mixed action profile of length equal to n_0 +
... + n_N-1, where `out[indptr[i]:indptr[i+1]]` contains player
i's mixed action.
indptr : array_like(int, ndim=1)
Array of index pointers of length N+1, where `indptr[0] = 0` and
`indptr[i+1] = indptr[i] + n_i`.
Returns
-------
action_profile : tuple(ndarray(float, ndim=1))
Tuple of N mixed actions, each of length n_i.
"""
N = len(indptr) - 1
action_profile = tuple(x[indptr[i]:indptr[i+1]] for i in range(N))
return action_profile
|
def function[_get_action_profile, parameter[x, indptr]]:
constant[
Obtain a tuple of mixed actions from a flattened action profile.
Parameters
----------
x : array_like(float, ndim=1)
Array of flattened mixed action profile of length equal to n_0 +
... + n_N-1, where `out[indptr[i]:indptr[i+1]]` contains player
i's mixed action.
indptr : array_like(int, ndim=1)
Array of index pointers of length N+1, where `indptr[0] = 0` and
`indptr[i+1] = indptr[i] + n_i`.
Returns
-------
action_profile : tuple(ndarray(float, ndim=1))
Tuple of N mixed actions, each of length n_i.
]
variable[N] assign[=] binary_operation[call[name[len], parameter[name[indptr]]] - constant[1]]
variable[action_profile] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da20c6c5ed0>]]
return[name[action_profile]]
|
keyword[def] identifier[_get_action_profile] ( identifier[x] , identifier[indptr] ):
literal[string]
identifier[N] = identifier[len] ( identifier[indptr] )- literal[int]
identifier[action_profile] = identifier[tuple] ( identifier[x] [ identifier[indptr] [ identifier[i] ]: identifier[indptr] [ identifier[i] + literal[int] ]] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N] ))
keyword[return] identifier[action_profile]
|
def _get_action_profile(x, indptr):
"""
Obtain a tuple of mixed actions from a flattened action profile.
Parameters
----------
x : array_like(float, ndim=1)
Array of flattened mixed action profile of length equal to n_0 +
... + n_N-1, where `out[indptr[i]:indptr[i+1]]` contains player
i's mixed action.
indptr : array_like(int, ndim=1)
Array of index pointers of length N+1, where `indptr[0] = 0` and
`indptr[i+1] = indptr[i] + n_i`.
Returns
-------
action_profile : tuple(ndarray(float, ndim=1))
Tuple of N mixed actions, each of length n_i.
"""
N = len(indptr) - 1
action_profile = tuple((x[indptr[i]:indptr[i + 1]] for i in range(N)))
return action_profile
|
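A quick check of the indptr slicing above for a three-player profile with action counts (2, 3, 2), assuming the function is in scope:

import numpy as np

x = np.array([0.5, 0.5, 0.2, 0.3, 0.5, 1.0, 0.0])
indptr = np.array([0, 2, 5, 7])        # indptr[i+1] = indptr[i] + n_i
for action in _get_action_profile(x, indptr):
    print(action)
# [0.5 0.5]  [0.2 0.3 0.5]  [1. 0.]
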
def RunStateMethod(self, method_name, request=None, responses=None):
"""Completes the request by calling the state method.
Args:
method_name: The name of the state method to call.
request: A RequestState protobuf.
responses: A list of GrrMessages responding to the request.
"""
if self._TerminationPending():
return
client_id = None
try:
self.context.current_state = method_name
if request and responses:
client_id = request.client_id or self.runner_args.client_id
logging.debug("%s Running %s with %d responses from %s",
self.session_id, method_name, len(responses), client_id)
else:
logging.debug("%s Running state method %s", self.session_id,
method_name)
# Extend our lease if needed.
self.flow_obj.HeartBeat()
try:
method = getattr(self.flow_obj, method_name)
except AttributeError:
raise FlowRunnerError("Flow %s has no state method %s" %
(self.flow_obj.__class__.__name__, method_name))
# Prepare a responses object for the state method to use:
responses = flow_responses.Responses.FromLegacyResponses(
request=request, responses=responses)
self.SaveResourceUsage(responses.status)
stats_collector_instance.Get().IncrementCounter("grr_worker_states_run")
if method_name == "Start":
stats_collector_instance.Get().IncrementCounter(
"flow_starts", fields=[self.flow_obj.Name()])
method()
else:
method(responses)
if self.sent_replies:
self.ProcessRepliesWithOutputPlugins(self.sent_replies)
self.sent_replies = []
# We don't know here what exceptions can be thrown in the flow but we have
# to continue. Thus, we catch everything.
except Exception as e: # pylint: disable=broad-except
# This flow will terminate now
# TODO(user): Deprecate in favor of 'flow_errors'.
stats_collector_instance.Get().IncrementCounter("grr_flow_errors")
stats_collector_instance.Get().IncrementCounter(
"flow_errors", fields=[self.flow_obj.Name()])
logging.exception("Flow %s raised %s.", self.session_id, e)
self.Error(traceback.format_exc(), client_id=client_id)
|
def function[RunStateMethod, parameter[self, method_name, request, responses]]:
constant[Completes the request by calling the state method.
Args:
method_name: The name of the state method to call.
request: A RequestState protobuf.
responses: A list of GrrMessages responding to the request.
]
if call[name[self]._TerminationPending, parameter[]] begin[:]
return[None]
variable[client_id] assign[=] constant[None]
<ast.Try object at 0x7da1b1d91480>
|
keyword[def] identifier[RunStateMethod] ( identifier[self] , identifier[method_name] , identifier[request] = keyword[None] , identifier[responses] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_TerminationPending] ():
keyword[return]
identifier[client_id] = keyword[None]
keyword[try] :
identifier[self] . identifier[context] . identifier[current_state] = identifier[method_name]
keyword[if] identifier[request] keyword[and] identifier[responses] :
identifier[client_id] = identifier[request] . identifier[client_id] keyword[or] identifier[self] . identifier[runner_args] . identifier[client_id]
identifier[logging] . identifier[debug] ( literal[string] ,
identifier[self] . identifier[session_id] , identifier[method_name] , identifier[len] ( identifier[responses] ), identifier[client_id] )
keyword[else] :
identifier[logging] . identifier[debug] ( literal[string] , identifier[self] . identifier[session_id] ,
identifier[method_name] )
identifier[self] . identifier[flow_obj] . identifier[HeartBeat] ()
keyword[try] :
identifier[method] = identifier[getattr] ( identifier[self] . identifier[flow_obj] , identifier[method_name] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[FlowRunnerError] ( literal[string] %
( identifier[self] . identifier[flow_obj] . identifier[__class__] . identifier[__name__] , identifier[method_name] ))
identifier[responses] = identifier[flow_responses] . identifier[Responses] . identifier[FromLegacyResponses] (
identifier[request] = identifier[request] , identifier[responses] = identifier[responses] )
identifier[self] . identifier[SaveResourceUsage] ( identifier[responses] . identifier[status] )
identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] ( literal[string] )
keyword[if] identifier[method_name] == literal[string] :
identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] (
literal[string] , identifier[fields] =[ identifier[self] . identifier[flow_obj] . identifier[Name] ()])
identifier[method] ()
keyword[else] :
identifier[method] ( identifier[responses] )
keyword[if] identifier[self] . identifier[sent_replies] :
identifier[self] . identifier[ProcessRepliesWithOutputPlugins] ( identifier[self] . identifier[sent_replies] )
identifier[self] . identifier[sent_replies] =[]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] ( literal[string] )
identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] (
literal[string] , identifier[fields] =[ identifier[self] . identifier[flow_obj] . identifier[Name] ()])
identifier[logging] . identifier[exception] ( literal[string] , identifier[self] . identifier[session_id] , identifier[e] )
identifier[self] . identifier[Error] ( identifier[traceback] . identifier[format_exc] (), identifier[client_id] = identifier[client_id] )
|
def RunStateMethod(self, method_name, request=None, responses=None):
"""Completes the request by calling the state method.
Args:
method_name: The name of the state method to call.
request: A RequestState protobuf.
responses: A list of GrrMessages responding to the request.
"""
if self._TerminationPending():
return # depends on [control=['if'], data=[]]
client_id = None
try:
self.context.current_state = method_name
if request and responses:
client_id = request.client_id or self.runner_args.client_id
logging.debug('%s Running %s with %d responses from %s', self.session_id, method_name, len(responses), client_id) # depends on [control=['if'], data=[]]
else:
logging.debug('%s Running state method %s', self.session_id, method_name)
# Extend our lease if needed.
self.flow_obj.HeartBeat()
try:
method = getattr(self.flow_obj, method_name) # depends on [control=['try'], data=[]]
except AttributeError:
raise FlowRunnerError('Flow %s has no state method %s' % (self.flow_obj.__class__.__name__, method_name)) # depends on [control=['except'], data=[]]
# Prepare a responses object for the state method to use:
responses = flow_responses.Responses.FromLegacyResponses(request=request, responses=responses)
self.SaveResourceUsage(responses.status)
stats_collector_instance.Get().IncrementCounter('grr_worker_states_run')
if method_name == 'Start':
stats_collector_instance.Get().IncrementCounter('flow_starts', fields=[self.flow_obj.Name()])
method() # depends on [control=['if'], data=[]]
else:
method(responses)
if self.sent_replies:
self.ProcessRepliesWithOutputPlugins(self.sent_replies)
self.sent_replies = [] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
# We don't know here what exceptions can be thrown in the flow but we have
# to continue. Thus, we catch everything.
except Exception as e: # pylint: disable=broad-except
# This flow will terminate now
# TODO(user): Deprecate in favor of 'flow_errors'.
stats_collector_instance.Get().IncrementCounter('grr_flow_errors')
stats_collector_instance.Get().IncrementCounter('flow_errors', fields=[self.flow_obj.Name()])
logging.exception('Flow %s raised %s.', self.session_id, e)
self.Error(traceback.format_exc(), client_id=client_id) # depends on [control=['except'], data=['e']]
|
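The core dispatch pattern in RunStateMethod, reduced to a standalone sketch: resolve the state method by name with getattr and raise a clear error when it is missing. ToyFlow and this FlowRunnerError are stand-ins, not GRR code.

class FlowRunnerError(Exception):
    pass

class ToyFlow:
    def Start(self):
        print('started')

def run_state_method(flow_obj, method_name, responses=None):
    try:
        method = getattr(flow_obj, method_name)
    except AttributeError:
        raise FlowRunnerError('Flow %s has no state method %s' %
                              (flow_obj.__class__.__name__, method_name))
    # Mirror the branch above: 'Start' takes no responses argument.
    return method() if method_name == 'Start' else method(responses)

run_state_method(ToyFlow(), 'Start')   # prints 'started'
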
async def open_interface_message(self, message, context):
"""Handle an open_interface message.
See :meth:`AbstractDeviceAdapter.open_interface`.
"""
conn_string = message.get('connection_string')
interface = message.get('interface')
client_id = context.user_data
await self.open_interface(client_id, conn_string, interface)
|
<ast.AsyncFunctionDef object at 0x7da204344040>
|
keyword[async] keyword[def] identifier[open_interface_message] ( identifier[self] , identifier[message] , identifier[context] ):
literal[string]
identifier[conn_string] = identifier[message] . identifier[get] ( literal[string] )
identifier[interface] = identifier[message] . identifier[get] ( literal[string] )
identifier[client_id] = identifier[context] . identifier[user_data]
keyword[await] identifier[self] . identifier[open_interface] ( identifier[client_id] , identifier[conn_string] , identifier[interface] )
|
async def open_interface_message(self, message, context):
"""Handle an open_interface message.
See :meth:`AbstractDeviceAdapter.open_interface`.
"""
conn_string = message.get('connection_string')
interface = message.get('interface')
client_id = context.user_data
await self.open_interface(client_id, conn_string, interface)
|
def render(self, context, request=None):
"""Render component"""
context['component'] = self
return render_to_string(self.template_name, context, request)
|
def function[render, parameter[self, context, request]]:
constant[Render component]
call[name[context]][constant[component]] assign[=] name[self]
return[call[name[render_to_string], parameter[name[self].template_name, name[context], name[request]]]]
|
keyword[def] identifier[render] ( identifier[self] , identifier[context] , identifier[request] = keyword[None] ):
literal[string]
identifier[context] [ literal[string] ]= identifier[self]
keyword[return] identifier[render_to_string] ( identifier[self] . identifier[template_name] , identifier[context] , identifier[request] )
|
def render(self, context, request=None):
"""Render component"""
context['component'] = self
return render_to_string(self.template_name, context, request)
|
def partial_transform(self, traj):
"""Featurize an MD trajectory into a vector space derived from
residue-residue distances
Parameters
----------
traj : mdtraj.Trajectory
A molecular dynamics trajectory to featurize.
Returns
-------
features : np.ndarray, dtype=float, shape=(n_samples, n_features)
A featurized trajectory is a 2D array of shape
`(length_of_trajectory x n_features)` where each `features[i]`
vector is computed by applying the featurization function
to the `i`th snapshot of the input trajectory.
See Also
--------
transform : simultaneously featurize a collection of MD trajectories
"""
if self.soft_min:
distances, _ = md.compute_contacts(traj, self.contacts,
self.scheme, self.ignore_nonprotein,
soft_min=self.soft_min,
soft_min_beta=self.soft_min_beta,
periodic=self.periodic)
else:
distances, _ = md.compute_contacts(traj, self.contacts,
self.scheme, self.ignore_nonprotein,
periodic=self.periodic)
return self._transform(distances)
|
def function[partial_transform, parameter[self, traj]]:
constant[Featurize an MD trajectory into a vector space derived from
residue-residue distances
Parameters
----------
traj : mdtraj.Trajectory
A molecular dynamics trajectory to featurize.
Returns
-------
features : np.ndarray, dtype=float, shape=(n_samples, n_features)
A featurized trajectory is a 2D array of shape
`(length_of_trajectory x n_features)` where each `features[i]`
vector is computed by applying the featurization function
to the `i`th snapshot of the input trajectory.
See Also
--------
transform : simultaneously featurize a collection of MD trajectories
]
if name[self].soft_min begin[:]
<ast.Tuple object at 0x7da1b0786c80> assign[=] call[name[md].compute_contacts, parameter[name[traj], name[self].contacts, name[self].scheme, name[self].ignore_nonprotein]]
return[call[name[self]._transform, parameter[name[distances]]]]
|
keyword[def] identifier[partial_transform] ( identifier[self] , identifier[traj] ):
literal[string]
keyword[if] identifier[self] . identifier[soft_min] :
identifier[distances] , identifier[_] = identifier[md] . identifier[compute_contacts] ( identifier[traj] , identifier[self] . identifier[contacts] ,
identifier[self] . identifier[scheme] , identifier[self] . identifier[ignore_nonprotein] ,
identifier[soft_min] = identifier[self] . identifier[soft_min] ,
identifier[soft_min_beta] = identifier[self] . identifier[soft_min_beta] ,
identifier[periodic] = identifier[self] . identifier[periodic] )
keyword[else] :
identifier[distances] , identifier[_] = identifier[md] . identifier[compute_contacts] ( identifier[traj] , identifier[self] . identifier[contacts] ,
identifier[self] . identifier[scheme] , identifier[self] . identifier[ignore_nonprotein] ,
identifier[periodic] = identifier[self] . identifier[periodic] )
keyword[return] identifier[self] . identifier[_transform] ( identifier[distances] )
|
def partial_transform(self, traj):
"""Featurize an MD trajectory into a vector space derived from
residue-residue distances
Parameters
----------
traj : mdtraj.Trajectory
A molecular dynamics trajectory to featurize.
Returns
-------
features : np.ndarray, dtype=float, shape=(n_samples, n_features)
A featurized trajectory is a 2D array of shape
`(length_of_trajectory x n_features)` where each `features[i]`
vector is computed by applying the featurization function
to the `i`th snapshot of the input trajectory.
See Also
--------
transform : simultaneously featurize a collection of MD trajectories
"""
if self.soft_min:
(distances, _) = md.compute_contacts(traj, self.contacts, self.scheme, self.ignore_nonprotein, soft_min=self.soft_min, soft_min_beta=self.soft_min_beta, periodic=self.periodic) # depends on [control=['if'], data=[]]
else:
(distances, _) = md.compute_contacts(traj, self.contacts, self.scheme, self.ignore_nonprotein, periodic=self.periodic)
return self._transform(distances)
|
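A hedged usage sketch of the featurizer's underlying call: mdtraj's compute_contacts returns the distances array plus the residue pairs it used. The file paths are placeholders and mdtraj must be installed.

import mdtraj as md

traj = md.load('traj.xtc', top='top.pdb')   # placeholder inputs
distances, residue_pairs = md.compute_contacts(traj, contacts='all',
                                               scheme='closest-heavy')
print(distances.shape)    # (n_frames, n_pairs), ready for _transform()
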
def validate(self, value):
"""
Accepts: str, unicode
Returns: unicode
"""
val = value
if isinstance(val, str):
#FIXME: unsafe decoding
val = unicode(value)
val = super(String, self).validate(val)
if not isinstance(val, unicode):
raise ValueError("Not a string: %r" % (value, ))
return val
|
def function[validate, parameter[self, value]]:
constant[
Accepts: str, unicode
Returns: unicode
]
variable[val] assign[=] name[value]
if call[name[isinstance], parameter[name[val], name[str]]] begin[:]
variable[val] assign[=] call[name[unicode], parameter[name[value]]]
variable[val] assign[=] call[call[name[super], parameter[name[String], name[self]]].validate, parameter[name[val]]]
if <ast.UnaryOp object at 0x7da1b0a485b0> begin[:]
<ast.Raise object at 0x7da1b0a48b50>
return[name[val]]
|
keyword[def] identifier[validate] ( identifier[self] , identifier[value] ):
literal[string]
identifier[val] = identifier[value]
keyword[if] identifier[isinstance] ( identifier[val] , identifier[str] ):
identifier[val] = identifier[unicode] ( identifier[value] )
identifier[val] = identifier[super] ( identifier[String] , identifier[self] ). identifier[validate] ( identifier[val] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[unicode] ):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[value] ,))
keyword[return] identifier[val]
|
def validate(self, value):
"""
Accepts: str, unicode
Returns: unicode
"""
val = value
if isinstance(val, str):
#FIXME: unsafe decoding
val = unicode(value) # depends on [control=['if'], data=[]]
val = super(String, self).validate(val)
if not isinstance(val, unicode):
raise ValueError('Not a string: %r' % (value,)) # depends on [control=['if'], data=[]]
return val
|
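The validator above is Python 2-era (str versus unicode). A rough Python 3 analogue of the same check, decoding bytes defensively (the original flags its own decoding as unsafe):

def to_text(value):
    if isinstance(value, bytes):
        value = value.decode('utf-8', 'replace')   # lossy but never raises
    if not isinstance(value, str):
        raise ValueError('Not a string: %r' % (value,))
    return value

print(to_text(b'caf\xc3\xa9'))   # café
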
def _publish_grade(self, score, only_if_higher=None):
"""
Publish a grade to the runtime.
"""
grade_dict = {
'value': score.raw_earned,
'max_value': score.raw_possible,
'only_if_higher': only_if_higher,
}
self.runtime.publish(self, 'grade', grade_dict)
|
def function[_publish_grade, parameter[self, score, only_if_higher]]:
constant[
Publish a grade to the runtime.
]
variable[grade_dict] assign[=] dictionary[[<ast.Constant object at 0x7da18f722170>, <ast.Constant object at 0x7da18f720670>, <ast.Constant object at 0x7da18f720370>], [<ast.Attribute object at 0x7da18f720820>, <ast.Attribute object at 0x7da18f723c10>, <ast.Name object at 0x7da18f7218d0>]]
call[name[self].runtime.publish, parameter[name[self], constant[grade], name[grade_dict]]]
|
keyword[def] identifier[_publish_grade] ( identifier[self] , identifier[score] , identifier[only_if_higher] = keyword[None] ):
literal[string]
identifier[grade_dict] ={
literal[string] : identifier[score] . identifier[raw_earned] ,
literal[string] : identifier[score] . identifier[raw_possible] ,
literal[string] : identifier[only_if_higher] ,
}
identifier[self] . identifier[runtime] . identifier[publish] ( identifier[self] , literal[string] , identifier[grade_dict] )
|
def _publish_grade(self, score, only_if_higher=None):
"""
Publish a grade to the runtime.
"""
grade_dict = {'value': score.raw_earned, 'max_value': score.raw_possible, 'only_if_higher': only_if_higher}
self.runtime.publish(self, 'grade', grade_dict)
|
def __get_trace_facvar(self, polynomial):
"""Return dense vector representation of a polynomial. This function is
nearly identical to __push_facvar_sparse, but instead of pushing
sparse entries to the constraint matrices, it returns a dense
vector.
"""
facvar = [0] * (self.n_vars + 1)
F = {}
for i in range(self.matrix_var_dim):
for j in range(self.matrix_var_dim):
for key, value in \
polynomial[i, j].as_coefficients_dict().items():
skey = apply_substitutions(key, self.substitutions,
self.pure_substitution_rules)
try:
Fk = F[skey]
except KeyError:
Fk = zeros(self.matrix_var_dim, self.matrix_var_dim)
Fk[i, j] += value
F[skey] = Fk
# This is the tracing part
for key, Fk in F.items():
if key == S.One:
k = 1
else:
k = self.monomial_index[key]
for i in range(self.matrix_var_dim):
for j in range(self.matrix_var_dim):
sym_matrix = zeros(self.matrix_var_dim,
self.matrix_var_dim)
sym_matrix[i, j] = 1
facvar[k+i*self.matrix_var_dim+j] = (sym_matrix*Fk).trace()
facvar = [float(f) for f in facvar]
return facvar
|
def function[__get_trace_facvar, parameter[self, polynomial]]:
constant[Return dense vector representation of a polynomial. This function is
nearly identical to __push_facvar_sparse, but instead of pushing
sparse entries to the constraint matrices, it returns a dense
vector.
]
variable[facvar] assign[=] binary_operation[list[[<ast.Constant object at 0x7da2054a49a0>]] * binary_operation[name[self].n_vars + constant[1]]]
variable[F] assign[=] dictionary[[], []]
for taget[name[i]] in starred[call[name[range], parameter[name[self].matrix_var_dim]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[self].matrix_var_dim]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2054a4e80>, <ast.Name object at 0x7da2054a4b20>]]] in starred[call[call[call[name[polynomial]][tuple[[<ast.Name object at 0x7da2054a4eb0>, <ast.Name object at 0x7da2054a4ca0>]]].as_coefficients_dict, parameter[]].items, parameter[]]] begin[:]
variable[skey] assign[=] call[name[apply_substitutions], parameter[name[key], name[self].substitutions, name[self].pure_substitution_rules]]
<ast.Try object at 0x7da2054a64a0>
<ast.AugAssign object at 0x7da2054a5780>
call[name[F]][name[skey]] assign[=] name[Fk]
for taget[tuple[[<ast.Name object at 0x7da2054a6770>, <ast.Name object at 0x7da2054a4a60>]]] in starred[call[name[F].items, parameter[]]] begin[:]
if compare[name[key] equal[==] name[S].One] begin[:]
variable[k] assign[=] constant[1]
for taget[name[i]] in starred[call[name[range], parameter[name[self].matrix_var_dim]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[self].matrix_var_dim]]] begin[:]
variable[sym_matrix] assign[=] call[name[zeros], parameter[name[self].matrix_var_dim, name[self].matrix_var_dim]]
call[name[sym_matrix]][tuple[[<ast.Name object at 0x7da2054a5810>, <ast.Name object at 0x7da2054a4340>]]] assign[=] constant[1]
call[name[facvar]][binary_operation[binary_operation[name[k] + binary_operation[name[i] * name[self].matrix_var_dim]] + name[j]]] assign[=] call[binary_operation[name[sym_matrix] * name[Fk]].trace, parameter[]]
variable[facvar] assign[=] <ast.ListComp object at 0x7da2054a6dd0>
return[name[facvar]]
|
keyword[def] identifier[__get_trace_facvar] ( identifier[self] , identifier[polynomial] ):
literal[string]
identifier[facvar] =[ literal[int] ]*( identifier[self] . identifier[n_vars] + literal[int] )
identifier[F] ={}
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[matrix_var_dim] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[matrix_var_dim] ):
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[polynomial] [ identifier[i] , identifier[j] ]. identifier[as_coefficients_dict] (). identifier[items] ():
identifier[skey] = identifier[apply_substitutions] ( identifier[key] , identifier[self] . identifier[substitutions] ,
identifier[self] . identifier[pure_substitution_rules] )
keyword[try] :
identifier[Fk] = identifier[F] [ identifier[skey] ]
keyword[except] identifier[KeyError] :
identifier[Fk] = identifier[zeros] ( identifier[self] . identifier[matrix_var_dim] , identifier[self] . identifier[matrix_var_dim] )
identifier[Fk] [ identifier[i] , identifier[j] ]+= identifier[value]
identifier[F] [ identifier[skey] ]= identifier[Fk]
keyword[for] identifier[key] , identifier[Fk] keyword[in] identifier[F] . identifier[items] ():
keyword[if] identifier[key] == identifier[S] . identifier[One] :
identifier[k] = literal[int]
keyword[else] :
identifier[k] = identifier[self] . identifier[monomial_index] [ identifier[key] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[matrix_var_dim] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[matrix_var_dim] ):
identifier[sym_matrix] = identifier[zeros] ( identifier[self] . identifier[matrix_var_dim] ,
identifier[self] . identifier[matrix_var_dim] )
identifier[sym_matrix] [ identifier[i] , identifier[j] ]= literal[int]
identifier[facvar] [ identifier[k] + identifier[i] * identifier[self] . identifier[matrix_var_dim] + identifier[j] ]=( identifier[sym_matrix] * identifier[Fk] ). identifier[trace] ()
identifier[facvar] =[ identifier[float] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[facvar] ]
keyword[return] identifier[facvar]
|
def __get_trace_facvar(self, polynomial):
"""Return dense vector representation of a polynomial. This function is
nearly identical to __push_facvar_sparse, but instead of pushing
sparse entries to the constraint matrices, it returns a dense
vector.
"""
facvar = [0] * (self.n_vars + 1)
F = {}
for i in range(self.matrix_var_dim):
for j in range(self.matrix_var_dim):
for (key, value) in polynomial[i, j].as_coefficients_dict().items():
skey = apply_substitutions(key, self.substitutions, self.pure_substitution_rules)
try:
Fk = F[skey] # depends on [control=['try'], data=[]]
except KeyError:
Fk = zeros(self.matrix_var_dim, self.matrix_var_dim) # depends on [control=['except'], data=[]]
Fk[i, j] += value
F[skey] = Fk # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
# This is the tracing part
for (key, Fk) in F.items():
if key == S.One:
k = 1 # depends on [control=['if'], data=[]]
else:
k = self.monomial_index[key]
for i in range(self.matrix_var_dim):
for j in range(self.matrix_var_dim):
sym_matrix = zeros(self.matrix_var_dim, self.matrix_var_dim)
sym_matrix[i, j] = 1
facvar[k + i * self.matrix_var_dim + j] = (sym_matrix * Fk).trace() # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=[]]
facvar = [float(f) for f in facvar]
return facvar
|
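A sketch of the coefficient-grouping step above: sympy's as_coefficients_dict() splits an expression into monomial-to-coefficient pairs, and the constant term's key is S.One, which is exactly what the key == S.One test catches.

from sympy import symbols, S

x, y = symbols('x y')
poly = 3*x + 2*x*y + 5
for monomial, coeff in poly.as_coefficients_dict().items():
    print(monomial, coeff, monomial == S.One)
# prints (in some order): x 3 False, x*y 2 False, 1 5 True
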
def getButtonIdNameFromEnum(self, eButtonId):
"""returns the name of an EVRButtonId enum value. This function is deprecated in favor of the new IVRInput system."""
fn = self.function_table.getButtonIdNameFromEnum
result = fn(eButtonId)
return result
|
def function[getButtonIdNameFromEnum, parameter[self, eButtonId]]:
constant[returns the name of an EVRButtonId enum value. This function is deprecated in favor of the new IVRInput system.]
variable[fn] assign[=] name[self].function_table.getButtonIdNameFromEnum
variable[result] assign[=] call[name[fn], parameter[name[eButtonId]]]
return[name[result]]
|
keyword[def] identifier[getButtonIdNameFromEnum] ( identifier[self] , identifier[eButtonId] ):
literal[string]
identifier[fn] = identifier[self] . identifier[function_table] . identifier[getButtonIdNameFromEnum]
identifier[result] = identifier[fn] ( identifier[eButtonId] )
keyword[return] identifier[result]
|
def getButtonIdNameFromEnum(self, eButtonId):
"""returns the name of an EVRButtonId enum value. This function is deprecated in favor of the new IVRInput system."""
fn = self.function_table.getButtonIdNameFromEnum
result = fn(eButtonId)
return result
|
def error(self, msg):
"""Called to handle an error message received from the server.
This method just logs the error message
returned:
NO_RESPONSE_NEEDED
"""
body = msg['body'].replace(NULL, '')
brief_msg = ""
if 'message' in msg['headers']:
brief_msg = msg['headers']['message']
        self.log.error("Received server error - message %s\n\n%s" % (brief_msg, body))
returned = NO_RESPONSE_NEEDED
if self.testing:
returned = 'error'
return returned
|
def function[error, parameter[self, msg]]:
constant[Called to handle an error message received from the server.
This method just logs the error message
returned:
NO_RESPONSE_NEEDED
]
variable[body] assign[=] call[call[name[msg]][constant[body]].replace, parameter[name[NULL], constant[]]]
variable[brief_msg] assign[=] constant[]
if compare[constant[message] in call[name[msg]][constant[headers]]] begin[:]
variable[brief_msg] assign[=] call[call[name[msg]][constant[headers]]][constant[message]]
    call[name[self].log.error, parameter[binary_operation[constant[Received server error - message %s
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204567a60>, <ast.Name object at 0x7da204567010>]]]]]
variable[returned] assign[=] name[NO_RESPONSE_NEEDED]
if name[self].testing begin[:]
variable[returned] assign[=] constant[error]
return[name[returned]]
|
keyword[def] identifier[error] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[body] = identifier[msg] [ literal[string] ]. identifier[replace] ( identifier[NULL] , literal[string] )
identifier[brief_msg] = literal[string]
keyword[if] literal[string] keyword[in] identifier[msg] [ literal[string] ]:
identifier[brief_msg] = identifier[msg] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[log] . identifier[error] ( literal[string] %( identifier[brief_msg] , identifier[body] ))
identifier[returned] = identifier[NO_RESPONSE_NEEDED]
keyword[if] identifier[self] . identifier[testing] :
identifier[returned] = literal[string]
keyword[return] identifier[returned]
|
def error(self, msg):
"""Called to handle an error message received from the server.
    This method just logs the error message.
    Returns:
        NO_RESPONSE_NEEDED (or 'error' when self.testing is set)
"""
body = msg['body'].replace(NULL, '')
brief_msg = ''
if 'message' in msg['headers']:
brief_msg = msg['headers']['message'] # depends on [control=['if'], data=[]]
self.log.error('Received server error - message%s\n\n%s' % (brief_msg, body))
returned = NO_RESPONSE_NEEDED
if self.testing:
returned = 'error' # depends on [control=['if'], data=[]]
return returned
|
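A hedged sketch of feeding the handler above; the frame layout is inferred from the method body, NULL is assumed to be the protocol's null byte, and 'listener' is a hypothetical instance:
    NULL = '\x00'
    frame = {
        'headers': {'message': ': malformed frame received'},
        'body': 'The frame you sent could not be parsed' + NULL,
    }
    result = listener.error(frame)
    # result is NO_RESPONSE_NEEDED, or the string 'error' when listener.testing is set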
def _construct_deutsch_jozsa_circuit(self):
"""
    Builds the Deutsch-Jozsa circuit, which can determine whether a function f mapping
:math:`\{0,1\}^n \to \{0,1\}` is constant or balanced, provided that it is one of them.
:return: A program corresponding to the desired instance of Deutsch Jozsa's Algorithm.
:rtype: Program
"""
dj_prog = Program()
# Put the first ancilla qubit (query qubit) into minus state
dj_prog.inst(X(self.ancillas[0]), H(self.ancillas[0]))
# Apply Hadamard, Oracle, and Hadamard again
dj_prog.inst([H(qubit) for qubit in self.computational_qubits])
# Build the oracle
oracle_prog = Program()
oracle_prog.defgate(ORACLE_GATE_NAME, self.unitary_matrix)
scratch_bit = self.ancillas[1]
qubits_for_funct = [scratch_bit] + self.computational_qubits
oracle_prog.inst(tuple([ORACLE_GATE_NAME] + qubits_for_funct))
dj_prog += oracle_prog
    # Here the oracle does not leave the computational qubits unchanged, so we use a CNOT
    # to move the result to the query qubit, and then we uncompute with the dagger.
dj_prog.inst(CNOT(self._qubits[0], self.ancillas[0]))
dj_prog += oracle_prog.dagger()
dj_prog.inst([H(qubit) for qubit in self.computational_qubits])
return dj_prog
|
def function[_construct_deutsch_jozsa_circuit, parameter[self]]:
constant[
    Builds the Deutsch-Jozsa circuit, which can determine whether a function f mapping
    :math:`\{0,1\}^n \to \{0,1\}` is constant or balanced, provided that it is one of them.
:return: A program corresponding to the desired instance of Deutsch Jozsa's Algorithm.
:rtype: Program
]
variable[dj_prog] assign[=] call[name[Program], parameter[]]
call[name[dj_prog].inst, parameter[call[name[X], parameter[call[name[self].ancillas][constant[0]]]], call[name[H], parameter[call[name[self].ancillas][constant[0]]]]]]
call[name[dj_prog].inst, parameter[<ast.ListComp object at 0x7da2054a7dc0>]]
variable[oracle_prog] assign[=] call[name[Program], parameter[]]
call[name[oracle_prog].defgate, parameter[name[ORACLE_GATE_NAME], name[self].unitary_matrix]]
variable[scratch_bit] assign[=] call[name[self].ancillas][constant[1]]
variable[qubits_for_funct] assign[=] binary_operation[list[[<ast.Name object at 0x7da2054a49a0>]] + name[self].computational_qubits]
call[name[oracle_prog].inst, parameter[call[name[tuple], parameter[binary_operation[list[[<ast.Name object at 0x7da2054a45b0>]] + name[qubits_for_funct]]]]]]
<ast.AugAssign object at 0x7da2054a44f0>
call[name[dj_prog].inst, parameter[call[name[CNOT], parameter[call[name[self]._qubits][constant[0]], call[name[self].ancillas][constant[0]]]]]]
<ast.AugAssign object at 0x7da207f02020>
call[name[dj_prog].inst, parameter[<ast.ListComp object at 0x7da207f03430>]]
return[name[dj_prog]]
|
keyword[def] identifier[_construct_deutsch_jozsa_circuit] ( identifier[self] ):
literal[string]
identifier[dj_prog] = identifier[Program] ()
identifier[dj_prog] . identifier[inst] ( identifier[X] ( identifier[self] . identifier[ancillas] [ literal[int] ]), identifier[H] ( identifier[self] . identifier[ancillas] [ literal[int] ]))
identifier[dj_prog] . identifier[inst] ([ identifier[H] ( identifier[qubit] ) keyword[for] identifier[qubit] keyword[in] identifier[self] . identifier[computational_qubits] ])
identifier[oracle_prog] = identifier[Program] ()
identifier[oracle_prog] . identifier[defgate] ( identifier[ORACLE_GATE_NAME] , identifier[self] . identifier[unitary_matrix] )
identifier[scratch_bit] = identifier[self] . identifier[ancillas] [ literal[int] ]
identifier[qubits_for_funct] =[ identifier[scratch_bit] ]+ identifier[self] . identifier[computational_qubits]
identifier[oracle_prog] . identifier[inst] ( identifier[tuple] ([ identifier[ORACLE_GATE_NAME] ]+ identifier[qubits_for_funct] ))
identifier[dj_prog] += identifier[oracle_prog]
identifier[dj_prog] . identifier[inst] ( identifier[CNOT] ( identifier[self] . identifier[_qubits] [ literal[int] ], identifier[self] . identifier[ancillas] [ literal[int] ]))
identifier[dj_prog] += identifier[oracle_prog] . identifier[dagger] ()
identifier[dj_prog] . identifier[inst] ([ identifier[H] ( identifier[qubit] ) keyword[for] identifier[qubit] keyword[in] identifier[self] . identifier[computational_qubits] ])
keyword[return] identifier[dj_prog]
|
def _construct_deutsch_jozsa_circuit(self):
"""
    Builds the Deutsch-Jozsa circuit, which can determine whether a function f mapping
    :math:`\\{0,1\\}^n \\to \\{0,1\\}` is constant or balanced, provided that it is one of them.
:return: A program corresponding to the desired instance of Deutsch Jozsa's Algorithm.
:rtype: Program
"""
dj_prog = Program()
# Put the first ancilla qubit (query qubit) into minus state
dj_prog.inst(X(self.ancillas[0]), H(self.ancillas[0]))
# Apply Hadamard, Oracle, and Hadamard again
dj_prog.inst([H(qubit) for qubit in self.computational_qubits])
# Build the oracle
oracle_prog = Program()
oracle_prog.defgate(ORACLE_GATE_NAME, self.unitary_matrix)
scratch_bit = self.ancillas[1]
qubits_for_funct = [scratch_bit] + self.computational_qubits
oracle_prog.inst(tuple([ORACLE_GATE_NAME] + qubits_for_funct))
dj_prog += oracle_prog
    # Here the oracle does not leave the computational qubits unchanged, so we use a CNOT
    # to move the result to the query qubit, and then we uncompute with the dagger.
dj_prog.inst(CNOT(self._qubits[0], self.ancillas[0]))
dj_prog += oracle_prog.dagger()
dj_prog.inst([H(qubit) for qubit in self.computational_qubits])
return dj_prog
|
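The oracle/CNOT/uncompute sequence above is the standard compute-copy-uncompute pattern: the oracle writes onto a scratch qubit, a CNOT copies the answer onto the query qubit, and the daggered oracle restores the scratch. A stripped-down pyQuil sketch of the same skeleton (qubit indices are illustrative):
    from pyquil import Program
    from pyquil.gates import CNOT, H, X

    prog = Program()
    prog += [X(2), H(2)]    # query qubit into |->, so phase kickback works
    prog += H(0)            # Hadamard on the computational qubit
    # ... oracle gate acting on (scratch, computational) qubits goes here ...
    prog += CNOT(0, 2)      # copy the oracle's answer onto the query qubit
    # ... daggered oracle goes here to uncompute the scratch qubit ...
    prog += H(0)            # final Hadamard before measurement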
def copy(self, name, load_existing_results=False):
"""
Returns a copy of the bpp object with the same parameter settings
but with the files.mcmcfiles and files.outfiles attributes cleared,
and with a new 'name' attribute.
Parameters
----------
name (str):
A name for the new copied bpp object that will be used for the
output files created by the object.
"""
## make deepcopy of self.__dict__ but do not copy async objects
subdict = {i:j for i,j in self.__dict__.iteritems() if i != "asyncs"}
newdict = copy.deepcopy(subdict)
## make back into a bpp object
if name == self.name:
raise Exception("new object must have a different 'name' than its parent")
newobj = Bpp(
name=name,
data=newdict["files"].data,
workdir=newdict["workdir"],
guidetree=newdict["tree"].write(),
imap={i:j for i, j in newdict["imap"].items()},
copied=True,
load_existing_results=load_existing_results,
)
## update special dict attributes but not files
for key, val in newobj.params.__dict__.iteritems():
newobj.params.__setattr__(key, self.params.__getattribute__(key))
for key, val in newobj.filters.__dict__.iteritems():
newobj.filters.__setattr__(key, self.filters.__getattribute__(key))
    ## new object must have a different name than its parent
return newobj
|
def function[copy, parameter[self, name, load_existing_results]]:
constant[
Returns a copy of the bpp object with the same parameter settings
but with the files.mcmcfiles and files.outfiles attributes cleared,
and with a new 'name' attribute.
Parameters
----------
name (str):
A name for the new copied bpp object that will be used for the
output files created by the object.
]
variable[subdict] assign[=] <ast.DictComp object at 0x7da207f981c0>
variable[newdict] assign[=] call[name[copy].deepcopy, parameter[name[subdict]]]
if compare[name[name] equal[==] name[self].name] begin[:]
<ast.Raise object at 0x7da207f9bb50>
variable[newobj] assign[=] call[name[Bpp], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6abbe0>, <ast.Name object at 0x7da20c6aab90>]]] in starred[call[name[newobj].params.__dict__.iteritems, parameter[]]] begin[:]
call[name[newobj].params.__setattr__, parameter[name[key], call[name[self].params.__getattribute__, parameter[name[key]]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6ab880>, <ast.Name object at 0x7da20c6a8670>]]] in starred[call[name[newobj].filters.__dict__.iteritems, parameter[]]] begin[:]
call[name[newobj].filters.__setattr__, parameter[name[key], call[name[self].filters.__getattribute__, parameter[name[key]]]]]
return[name[newobj]]
|
keyword[def] identifier[copy] ( identifier[self] , identifier[name] , identifier[load_existing_results] = keyword[False] ):
literal[string]
identifier[subdict] ={ identifier[i] : identifier[j] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[self] . identifier[__dict__] . identifier[iteritems] () keyword[if] identifier[i] != literal[string] }
identifier[newdict] = identifier[copy] . identifier[deepcopy] ( identifier[subdict] )
keyword[if] identifier[name] == identifier[self] . identifier[name] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[newobj] = identifier[Bpp] (
identifier[name] = identifier[name] ,
identifier[data] = identifier[newdict] [ literal[string] ]. identifier[data] ,
identifier[workdir] = identifier[newdict] [ literal[string] ],
identifier[guidetree] = identifier[newdict] [ literal[string] ]. identifier[write] (),
identifier[imap] ={ identifier[i] : identifier[j] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[newdict] [ literal[string] ]. identifier[items] ()},
identifier[copied] = keyword[True] ,
identifier[load_existing_results] = identifier[load_existing_results] ,
)
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[newobj] . identifier[params] . identifier[__dict__] . identifier[iteritems] ():
identifier[newobj] . identifier[params] . identifier[__setattr__] ( identifier[key] , identifier[self] . identifier[params] . identifier[__getattribute__] ( identifier[key] ))
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[newobj] . identifier[filters] . identifier[__dict__] . identifier[iteritems] ():
identifier[newobj] . identifier[filters] . identifier[__setattr__] ( identifier[key] , identifier[self] . identifier[filters] . identifier[__getattribute__] ( identifier[key] ))
keyword[return] identifier[newobj]
|
def copy(self, name, load_existing_results=False):
"""
Returns a copy of the bpp object with the same parameter settings
but with the files.mcmcfiles and files.outfiles attributes cleared,
and with a new 'name' attribute.
Parameters
----------
name (str):
A name for the new copied bpp object that will be used for the
output files created by the object.
"""
## make deepcopy of self.__dict__ but do not copy async objects
subdict = {i: j for (i, j) in self.__dict__.iteritems() if i != 'asyncs'}
newdict = copy.deepcopy(subdict)
## make back into a bpp object
if name == self.name:
raise Exception("new object must have a different 'name' than its parent") # depends on [control=['if'], data=[]]
newobj = Bpp(name=name, data=newdict['files'].data, workdir=newdict['workdir'], guidetree=newdict['tree'].write(), imap={i: j for (i, j) in newdict['imap'].items()}, copied=True, load_existing_results=load_existing_results)
## update special dict attributes but not files
for (key, val) in newobj.params.__dict__.iteritems():
newobj.params.__setattr__(key, self.params.__getattribute__(key)) # depends on [control=['for'], data=[]]
for (key, val) in newobj.filters.__dict__.iteritems():
newobj.filters.__setattr__(key, self.filters.__getattribute__(key)) # depends on [control=['for'], data=[]]
    ## new object must have a different name than its parent
return newobj
|
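A hypothetical usage sketch for copy() above (the `b1` object and its settings are assumed to exist):
    b2 = b1.copy(name="run2")   # params/filters carried over, result files cleared
    # b2 writes its own output under the new name, independently of b1
    # b1.copy(name=b1.name) would raise: the copy needs a different name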
def _get_content(cls, url, headers=HTTP_HEADERS):
"""
    Get HTTP content.
    :param url: content URL
    :param headers: HTTP headers
    :return: requests.Response object
"""
session = requests.Session()
return session.get(url, headers=headers)
|
def function[_get_content, parameter[cls, url, headers]]:
constant[
    Get HTTP content.
    :param url: content URL
    :param headers: HTTP headers
    :return: requests.Response object
]
variable[session] assign[=] call[name[requests].Session, parameter[]]
return[call[name[session].get, parameter[name[url]]]]
|
keyword[def] identifier[_get_content] ( identifier[cls] , identifier[url] , identifier[headers] = identifier[HTTP_HEADERS] ):
literal[string]
identifier[session] = identifier[requests] . identifier[Session] ()
keyword[return] identifier[session] . identifier[get] ( identifier[url] , identifier[headers] = identifier[headers] )
|
def _get_content(cls, url, headers=HTTP_HEADERS):
"""
    Get HTTP content.
    :param url: content URL
    :param headers: HTTP headers
    :return: requests.Response object
"""
session = requests.Session()
return session.get(url, headers=headers)
|
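Since the method above returns a raw requests.Response, a caller that wants a parse tree still has to feed it to BeautifulSoup; a sketch (the class name is assumed):
    from bs4 import BeautifulSoup

    resp = Scraper._get_content('https://example.com/stats')
    resp.raise_for_status()                            # surface HTTP errors early
    soup = BeautifulSoup(resp.content, 'html.parser')  # parse the response body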
def send_theme_file(self, filename):
"""
Function used to send static theme files from the theme folder to the browser.
"""
cache_timeout = self.get_send_file_max_age(filename)
return send_from_directory(self.config['THEME_STATIC_FOLDER'], filename,
cache_timeout=cache_timeout)
|
def function[send_theme_file, parameter[self, filename]]:
constant[
Function used to send static theme files from the theme folder to the browser.
]
variable[cache_timeout] assign[=] call[name[self].get_send_file_max_age, parameter[name[filename]]]
return[call[name[send_from_directory], parameter[call[name[self].config][constant[THEME_STATIC_FOLDER]], name[filename]]]]
|
keyword[def] identifier[send_theme_file] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[cache_timeout] = identifier[self] . identifier[get_send_file_max_age] ( identifier[filename] )
keyword[return] identifier[send_from_directory] ( identifier[self] . identifier[config] [ literal[string] ], identifier[filename] ,
identifier[cache_timeout] = identifier[cache_timeout] )
|
def send_theme_file(self, filename):
"""
Function used to send static theme files from the theme folder to the browser.
"""
cache_timeout = self.get_send_file_max_age(filename)
return send_from_directory(self.config['THEME_STATIC_FOLDER'], filename, cache_timeout=cache_timeout)
|
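send_theme_file() above is typically wired to a route so browsers can fetch theme assets; a hedged Flask-style sketch (route path and app object are assumptions):
    @app.route('/theme/<path:filename>')
    def theme_static(filename):
        # delegates to the cache-aware sender defined above
        return app.send_theme_file(filename)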
def generate_contents(startpath, outfilename=DEFAULT_BUILDFILE):
"""
Generate a build file (yaml) based on the contents of a
directory tree.
"""
def _ignored_name(name):
return (
name.startswith('.') or
name == PACKAGE_DIR_NAME or
name.endswith('~') or
name == outfilename
)
def _generate_contents(dir_path):
safename_duplicates = defaultdict(list)
for name in os.listdir(dir_path):
if _ignored_name(name):
continue
path = os.path.join(dir_path, name)
if os.path.isdir(path):
nodename = name
ext = None
elif os.path.isfile(path):
nodename, ext = splitext_no_dot(name)
else:
continue
safename = to_identifier(nodename)
safename_duplicates[safename].append((name, nodename, ext))
safename_to_name = {}
for safename, duplicates in iteritems(safename_duplicates):
for name, nodename, ext in duplicates:
if len(duplicates) > 1 and ext:
new_safename = to_identifier(name) # Name with ext
else:
new_safename = safename
existing_name = safename_to_name.get(new_safename)
if existing_name is not None:
message = "Duplicate node names in directory %r."
message += " %r was renamed to %r, which overlaps with %r"
raise BuildException(
message % (dir_path, name, new_safename, existing_name)
)
safename_to_name[new_safename] = name
contents = {}
for safename, name in iteritems(safename_to_name):
path = os.path.join(dir_path, name)
if os.path.isdir(path):
data = _generate_contents(path)
else:
rel_path = os.path.relpath(path, startpath)
data = dict(file=rel_path)
contents[safename] = data
return contents
return dict(
contents=_generate_contents(startpath)
)
|
def function[generate_contents, parameter[startpath, outfilename]]:
constant[
Generate a build file (yaml) based on the contents of a
directory tree.
]
def function[_ignored_name, parameter[name]]:
return[<ast.BoolOp object at 0x7da1b1240ac0>]
def function[_generate_contents, parameter[dir_path]]:
variable[safename_duplicates] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[name]] in starred[call[name[os].listdir, parameter[name[dir_path]]]] begin[:]
if call[name[_ignored_name], parameter[name[name]]] begin[:]
continue
variable[path] assign[=] call[name[os].path.join, parameter[name[dir_path], name[name]]]
if call[name[os].path.isdir, parameter[name[path]]] begin[:]
variable[nodename] assign[=] name[name]
variable[ext] assign[=] constant[None]
variable[safename] assign[=] call[name[to_identifier], parameter[name[nodename]]]
call[call[name[safename_duplicates]][name[safename]].append, parameter[tuple[[<ast.Name object at 0x7da1b1241a80>, <ast.Name object at 0x7da1b1241ab0>, <ast.Name object at 0x7da1b1241ae0>]]]]
variable[safename_to_name] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1241c30>, <ast.Name object at 0x7da1b1241c60>]]] in starred[call[name[iteritems], parameter[name[safename_duplicates]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1241d80>, <ast.Name object at 0x7da1b1241db0>, <ast.Name object at 0x7da1b1241de0>]]] in starred[name[duplicates]] begin[:]
if <ast.BoolOp object at 0x7da1b1241e70> begin[:]
variable[new_safename] assign[=] call[name[to_identifier], parameter[name[name]]]
variable[existing_name] assign[=] call[name[safename_to_name].get, parameter[name[new_safename]]]
if compare[name[existing_name] is_not constant[None]] begin[:]
variable[message] assign[=] constant[Duplicate node names in directory %r.]
<ast.AugAssign object at 0x7da1b12423b0>
<ast.Raise object at 0x7da1b1242440>
call[name[safename_to_name]][name[new_safename]] assign[=] name[name]
variable[contents] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b12432e0>, <ast.Name object at 0x7da1b12432b0>]]] in starred[call[name[iteritems], parameter[name[safename_to_name]]]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[dir_path], name[name]]]
if call[name[os].path.isdir, parameter[name[path]]] begin[:]
variable[data] assign[=] call[name[_generate_contents], parameter[name[path]]]
call[name[contents]][name[safename]] assign[=] name[data]
return[name[contents]]
return[call[name[dict], parameter[]]]
|
keyword[def] identifier[generate_contents] ( identifier[startpath] , identifier[outfilename] = identifier[DEFAULT_BUILDFILE] ):
literal[string]
keyword[def] identifier[_ignored_name] ( identifier[name] ):
keyword[return] (
identifier[name] . identifier[startswith] ( literal[string] ) keyword[or]
identifier[name] == identifier[PACKAGE_DIR_NAME] keyword[or]
identifier[name] . identifier[endswith] ( literal[string] ) keyword[or]
identifier[name] == identifier[outfilename]
)
keyword[def] identifier[_generate_contents] ( identifier[dir_path] ):
identifier[safename_duplicates] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[name] keyword[in] identifier[os] . identifier[listdir] ( identifier[dir_path] ):
keyword[if] identifier[_ignored_name] ( identifier[name] ):
keyword[continue]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[name] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
identifier[nodename] = identifier[name]
identifier[ext] = keyword[None]
keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
identifier[nodename] , identifier[ext] = identifier[splitext_no_dot] ( identifier[name] )
keyword[else] :
keyword[continue]
identifier[safename] = identifier[to_identifier] ( identifier[nodename] )
identifier[safename_duplicates] [ identifier[safename] ]. identifier[append] (( identifier[name] , identifier[nodename] , identifier[ext] ))
identifier[safename_to_name] ={}
keyword[for] identifier[safename] , identifier[duplicates] keyword[in] identifier[iteritems] ( identifier[safename_duplicates] ):
keyword[for] identifier[name] , identifier[nodename] , identifier[ext] keyword[in] identifier[duplicates] :
keyword[if] identifier[len] ( identifier[duplicates] )> literal[int] keyword[and] identifier[ext] :
identifier[new_safename] = identifier[to_identifier] ( identifier[name] )
keyword[else] :
identifier[new_safename] = identifier[safename]
identifier[existing_name] = identifier[safename_to_name] . identifier[get] ( identifier[new_safename] )
keyword[if] identifier[existing_name] keyword[is] keyword[not] keyword[None] :
identifier[message] = literal[string]
identifier[message] += literal[string]
keyword[raise] identifier[BuildException] (
identifier[message] %( identifier[dir_path] , identifier[name] , identifier[new_safename] , identifier[existing_name] )
)
identifier[safename_to_name] [ identifier[new_safename] ]= identifier[name]
identifier[contents] ={}
keyword[for] identifier[safename] , identifier[name] keyword[in] identifier[iteritems] ( identifier[safename_to_name] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[name] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
identifier[data] = identifier[_generate_contents] ( identifier[path] )
keyword[else] :
identifier[rel_path] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[path] , identifier[startpath] )
identifier[data] = identifier[dict] ( identifier[file] = identifier[rel_path] )
identifier[contents] [ identifier[safename] ]= identifier[data]
keyword[return] identifier[contents]
keyword[return] identifier[dict] (
identifier[contents] = identifier[_generate_contents] ( identifier[startpath] )
)
|
def generate_contents(startpath, outfilename=DEFAULT_BUILDFILE):
"""
Generate a build file (yaml) based on the contents of a
directory tree.
"""
def _ignored_name(name):
return name.startswith('.') or name == PACKAGE_DIR_NAME or name.endswith('~') or (name == outfilename)
def _generate_contents(dir_path):
safename_duplicates = defaultdict(list)
for name in os.listdir(dir_path):
if _ignored_name(name):
continue # depends on [control=['if'], data=[]]
path = os.path.join(dir_path, name)
if os.path.isdir(path):
nodename = name
ext = None # depends on [control=['if'], data=[]]
elif os.path.isfile(path):
(nodename, ext) = splitext_no_dot(name) # depends on [control=['if'], data=[]]
else:
continue
safename = to_identifier(nodename)
safename_duplicates[safename].append((name, nodename, ext)) # depends on [control=['for'], data=['name']]
safename_to_name = {}
for (safename, duplicates) in iteritems(safename_duplicates):
for (name, nodename, ext) in duplicates:
if len(duplicates) > 1 and ext:
new_safename = to_identifier(name) # Name with ext # depends on [control=['if'], data=[]]
else:
new_safename = safename
existing_name = safename_to_name.get(new_safename)
if existing_name is not None:
message = 'Duplicate node names in directory %r.'
message += ' %r was renamed to %r, which overlaps with %r'
raise BuildException(message % (dir_path, name, new_safename, existing_name)) # depends on [control=['if'], data=['existing_name']]
safename_to_name[new_safename] = name # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
contents = {}
for (safename, name) in iteritems(safename_to_name):
path = os.path.join(dir_path, name)
if os.path.isdir(path):
data = _generate_contents(path) # depends on [control=['if'], data=[]]
else:
rel_path = os.path.relpath(path, startpath)
data = dict(file=rel_path)
contents[safename] = data # depends on [control=['for'], data=[]]
return contents
return dict(contents=_generate_contents(startpath))
|
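The function above returns a plain dict, so serializing it to the build file is a single yaml call; a minimal sketch:
    import yaml

    build = generate_contents('my_package')     # {'contents': {...}}
    with open('build.yml', 'w') as fh:
        yaml.safe_dump(build, fh, default_flow_style=False)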
def retrieve(self, operation, field=None):
"""Retrieve a position in this collection.
:param operation: Name of an operation
:type operation: :class:`Operation`
:param field: Name of field for sort order
:type field: str
:return: The position for this operation
:rtype: Mark
:raises: NoTrackingCollection
"""
obj = self._get(operation, field)
if obj is None:
# empty Mark instance
return Mark(collection=self.collection, operation=operation, field=field)
return Mark.from_dict(self.collection, obj)
|
def function[retrieve, parameter[self, operation, field]]:
constant[Retrieve a position in this collection.
:param operation: Name of an operation
:type operation: :class:`Operation`
:param field: Name of field for sort order
:type field: str
:return: The position for this operation
:rtype: Mark
:raises: NoTrackingCollection
]
variable[obj] assign[=] call[name[self]._get, parameter[name[operation], name[field]]]
if compare[name[obj] is constant[None]] begin[:]
return[call[name[Mark], parameter[]]]
return[call[name[Mark].from_dict, parameter[name[self].collection, name[obj]]]]
|
keyword[def] identifier[retrieve] ( identifier[self] , identifier[operation] , identifier[field] = keyword[None] ):
literal[string]
identifier[obj] = identifier[self] . identifier[_get] ( identifier[operation] , identifier[field] )
keyword[if] identifier[obj] keyword[is] keyword[None] :
keyword[return] identifier[Mark] ( identifier[collection] = identifier[self] . identifier[collection] , identifier[operation] = identifier[operation] , identifier[field] = identifier[field] )
keyword[return] identifier[Mark] . identifier[from_dict] ( identifier[self] . identifier[collection] , identifier[obj] )
|
def retrieve(self, operation, field=None):
"""Retrieve a position in this collection.
:param operation: Name of an operation
:type operation: :class:`Operation`
:param field: Name of field for sort order
:type field: str
:return: The position for this operation
:rtype: Mark
:raises: NoTrackingCollection
"""
obj = self._get(operation, field)
if obj is None:
# empty Mark instance
return Mark(collection=self.collection, operation=operation, field=field) # depends on [control=['if'], data=[]]
return Mark.from_dict(self.collection, obj)
|
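An illustrative call for retrieve() above; the tracker object and the operation/field values are assumptions:
    mark = tracker.retrieve(operation='copy_missing', field='ts')
    # Always yields a Mark: an empty placeholder when nothing was stored yet,
    # otherwise one rebuilt from the persisted dict via Mark.from_dict().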
def connect_ex(self, addr):
"""
Call the :meth:`connect_ex` method of the underlying socket and set up
SSL on the socket, using the Context object supplied to this Connection
object at creation. Note that if the :meth:`connect_ex` method of the
socket doesn't return 0, SSL won't be initialized.
    :param addr: A remote address
:return: What the socket's connect_ex method returns
"""
connect_ex = self._socket.connect_ex
self.set_connect_state()
return connect_ex(addr)
|
def function[connect_ex, parameter[self, addr]]:
constant[
Call the :meth:`connect_ex` method of the underlying socket and set up
SSL on the socket, using the Context object supplied to this Connection
object at creation. Note that if the :meth:`connect_ex` method of the
socket doesn't return 0, SSL won't be initialized.
    :param addr: A remote address
:return: What the socket's connect_ex method returns
]
variable[connect_ex] assign[=] name[self]._socket.connect_ex
call[name[self].set_connect_state, parameter[]]
return[call[name[connect_ex], parameter[name[addr]]]]
|
keyword[def] identifier[connect_ex] ( identifier[self] , identifier[addr] ):
literal[string]
identifier[connect_ex] = identifier[self] . identifier[_socket] . identifier[connect_ex]
identifier[self] . identifier[set_connect_state] ()
keyword[return] identifier[connect_ex] ( identifier[addr] )
|
def connect_ex(self, addr):
"""
Call the :meth:`connect_ex` method of the underlying socket and set up
SSL on the socket, using the Context object supplied to this Connection
object at creation. Note that if the :meth:`connect_ex` method of the
socket doesn't return 0, SSL won't be initialized.
    :param addr: A remote address
:return: What the socket's connect_ex method returns
"""
connect_ex = self._socket.connect_ex
self.set_connect_state()
return connect_ex(addr)
|
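Because connect_ex() above returns the socket's errno-style code instead of raising, non-blocking callers usually check it explicitly; a sketch ('conn' is an assumed Connection instance):
    import errno

    rc = conn.connect_ex(('example.com', 443))
    if rc not in (0, errno.EINPROGRESS, errno.EWOULDBLOCK):
        # nonzero and not in-progress: SSL was never initialized on the socket
        raise OSError(rc, 'connect failed')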
def emit(self,rlen=150):
"""Emit a read based on a source sequence"""
source_tx = self._source.emit()
source_read = self._cutter.cut(source_tx)
if self._flip and self.options.rand.random() < 0.5: source_read = source_read.rc()
srname = self.options.rand.uuid4()
seqfull = FASTQ('@'+self.options.rand.uuid4()+"\tlong\n"+str(source_read.sequence)+"\n+\n"+'I'*source_read.sequence.length+"\n")
seqperm1 = seqfull.copy()
seqperm2 = seqfull.copy()
for e in self.errors:
seqperm1 = e.permute(seqperm1)
seqperm2 = e.permute(seqperm2)
sleft = seqperm1[0:rlen]
sleft = FASTQ('@'+sleft.name+"\tleft\n"+sleft.sequence+"\n+\n"+sleft.qual+"\n")
sright = seqperm2.rc()[0:rlen]
sright = FASTQ('@'+sright.name+"\tright\n"+sright.sequence+"\n+\n"+sright.qual+"\n")
emission = TranscriptEmission(source_tx,
Source(source_read,
source_read.slice_sequence(0,rlen),
source_read.rc().slice_sequence(0,rlen)),
Read(seqperm1,
sleft,
sright
))
return emission
|
def function[emit, parameter[self, rlen]]:
constant[Emit a read based on a source sequence]
variable[source_tx] assign[=] call[name[self]._source.emit, parameter[]]
variable[source_read] assign[=] call[name[self]._cutter.cut, parameter[name[source_tx]]]
if <ast.BoolOp object at 0x7da1b0a214b0> begin[:]
variable[source_read] assign[=] call[name[source_read].rc, parameter[]]
variable[srname] assign[=] call[name[self].options.rand.uuid4, parameter[]]
variable[seqfull] assign[=] call[name[FASTQ], parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[@] + call[name[self].options.rand.uuid4, parameter[]]] + constant[ long
]] + call[name[str], parameter[name[source_read].sequence]]] + constant[
+
]] + binary_operation[constant[I] * name[source_read].sequence.length]] + constant[
]]]]
variable[seqperm1] assign[=] call[name[seqfull].copy, parameter[]]
variable[seqperm2] assign[=] call[name[seqfull].copy, parameter[]]
for taget[name[e]] in starred[name[self].errors] begin[:]
variable[seqperm1] assign[=] call[name[e].permute, parameter[name[seqperm1]]]
variable[seqperm2] assign[=] call[name[e].permute, parameter[name[seqperm2]]]
variable[sleft] assign[=] call[name[seqperm1]][<ast.Slice object at 0x7da1b0a22260>]
variable[sleft] assign[=] call[name[FASTQ], parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[@] + name[sleft].name] + constant[ left
]] + name[sleft].sequence] + constant[
+
]] + name[sleft].qual] + constant[
]]]]
variable[sright] assign[=] call[call[name[seqperm2].rc, parameter[]]][<ast.Slice object at 0x7da1b0a22d10>]
variable[sright] assign[=] call[name[FASTQ], parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[@] + name[sright].name] + constant[ right
]] + name[sright].sequence] + constant[
+
]] + name[sright].qual] + constant[
]]]]
variable[emission] assign[=] call[name[TranscriptEmission], parameter[name[source_tx], call[name[Source], parameter[name[source_read], call[name[source_read].slice_sequence, parameter[constant[0], name[rlen]]], call[call[name[source_read].rc, parameter[]].slice_sequence, parameter[constant[0], name[rlen]]]]], call[name[Read], parameter[name[seqperm1], name[sleft], name[sright]]]]]
return[name[emission]]
|
keyword[def] identifier[emit] ( identifier[self] , identifier[rlen] = literal[int] ):
literal[string]
identifier[source_tx] = identifier[self] . identifier[_source] . identifier[emit] ()
identifier[source_read] = identifier[self] . identifier[_cutter] . identifier[cut] ( identifier[source_tx] )
keyword[if] identifier[self] . identifier[_flip] keyword[and] identifier[self] . identifier[options] . identifier[rand] . identifier[random] ()< literal[int] : identifier[source_read] = identifier[source_read] . identifier[rc] ()
identifier[srname] = identifier[self] . identifier[options] . identifier[rand] . identifier[uuid4] ()
identifier[seqfull] = identifier[FASTQ] ( literal[string] + identifier[self] . identifier[options] . identifier[rand] . identifier[uuid4] ()+ literal[string] + identifier[str] ( identifier[source_read] . identifier[sequence] )+ literal[string] + literal[string] * identifier[source_read] . identifier[sequence] . identifier[length] + literal[string] )
identifier[seqperm1] = identifier[seqfull] . identifier[copy] ()
identifier[seqperm2] = identifier[seqfull] . identifier[copy] ()
keyword[for] identifier[e] keyword[in] identifier[self] . identifier[errors] :
identifier[seqperm1] = identifier[e] . identifier[permute] ( identifier[seqperm1] )
identifier[seqperm2] = identifier[e] . identifier[permute] ( identifier[seqperm2] )
identifier[sleft] = identifier[seqperm1] [ literal[int] : identifier[rlen] ]
identifier[sleft] = identifier[FASTQ] ( literal[string] + identifier[sleft] . identifier[name] + literal[string] + identifier[sleft] . identifier[sequence] + literal[string] + identifier[sleft] . identifier[qual] + literal[string] )
identifier[sright] = identifier[seqperm2] . identifier[rc] ()[ literal[int] : identifier[rlen] ]
identifier[sright] = identifier[FASTQ] ( literal[string] + identifier[sright] . identifier[name] + literal[string] + identifier[sright] . identifier[sequence] + literal[string] + identifier[sright] . identifier[qual] + literal[string] )
identifier[emission] = identifier[TranscriptEmission] ( identifier[source_tx] ,
identifier[Source] ( identifier[source_read] ,
identifier[source_read] . identifier[slice_sequence] ( literal[int] , identifier[rlen] ),
identifier[source_read] . identifier[rc] (). identifier[slice_sequence] ( literal[int] , identifier[rlen] )),
identifier[Read] ( identifier[seqperm1] ,
identifier[sleft] ,
identifier[sright]
))
keyword[return] identifier[emission]
|
def emit(self, rlen=150):
"""Emit a read based on a source sequence"""
source_tx = self._source.emit()
source_read = self._cutter.cut(source_tx)
if self._flip and self.options.rand.random() < 0.5:
source_read = source_read.rc() # depends on [control=['if'], data=[]]
srname = self.options.rand.uuid4()
seqfull = FASTQ('@' + self.options.rand.uuid4() + '\tlong\n' + str(source_read.sequence) + '\n+\n' + 'I' * source_read.sequence.length + '\n')
seqperm1 = seqfull.copy()
seqperm2 = seqfull.copy()
for e in self.errors:
seqperm1 = e.permute(seqperm1)
seqperm2 = e.permute(seqperm2) # depends on [control=['for'], data=['e']]
sleft = seqperm1[0:rlen]
sleft = FASTQ('@' + sleft.name + '\tleft\n' + sleft.sequence + '\n+\n' + sleft.qual + '\n')
sright = seqperm2.rc()[0:rlen]
sright = FASTQ('@' + sright.name + '\tright\n' + sright.sequence + '\n+\n' + sright.qual + '\n')
emission = TranscriptEmission(source_tx, Source(source_read, source_read.slice_sequence(0, rlen), source_read.rc().slice_sequence(0, rlen)), Read(seqperm1, sleft, sright))
return emission
|
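A hypothetical driver for emit() above; the simulator construction is omitted, and the emission's attribute names depend on how TranscriptEmission is defined:
    emissions = [sim.emit(rlen=100) for _ in range(1000)]
    # each emission bundles the source transcript, the cut Source fragment,
    # and the error-permuted paired-end Read (full, left, right FASTQ entries)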
def login(self):
"""Login to the ZoneMinder API."""
_LOGGER.debug("Attempting to login to ZoneMinder")
login_post = {'view': 'console', 'action': 'login'}
if self._username:
login_post['username'] = self._username
if self._password:
login_post['password'] = self._password
req = requests.post(urljoin(self._server_url, 'index.php'),
data=login_post, verify=self._verify_ssl)
self._cookies = req.cookies
    # Login calls return a 200 response on both failure and success.
    # The only way to tell if you logged in correctly is to issue an API
    # call.
req = requests.get(
urljoin(self._server_url, 'api/host/getVersion.json'),
cookies=self._cookies,
timeout=ZoneMinder.DEFAULT_TIMEOUT,
verify=self._verify_ssl)
if not req.ok:
_LOGGER.error("Connection error logging into ZoneMinder")
return False
return True
|
def function[login, parameter[self]]:
constant[Login to the ZoneMinder API.]
call[name[_LOGGER].debug, parameter[constant[Attempting to login to ZoneMinder]]]
variable[login_post] assign[=] dictionary[[<ast.Constant object at 0x7da20e954220>, <ast.Constant object at 0x7da20e954bb0>], [<ast.Constant object at 0x7da20e957160>, <ast.Constant object at 0x7da20e9567a0>]]
if name[self]._username begin[:]
call[name[login_post]][constant[username]] assign[=] name[self]._username
if name[self]._password begin[:]
call[name[login_post]][constant[password]] assign[=] name[self]._password
variable[req] assign[=] call[name[requests].post, parameter[call[name[urljoin], parameter[name[self]._server_url, constant[index.php]]]]]
name[self]._cookies assign[=] name[req].cookies
variable[req] assign[=] call[name[requests].get, parameter[call[name[urljoin], parameter[name[self]._server_url, constant[api/host/getVersion.json]]]]]
if <ast.UnaryOp object at 0x7da20e954ee0> begin[:]
call[name[_LOGGER].error, parameter[constant[Connection error logging into ZoneMinder]]]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[login] ( identifier[self] ):
literal[string]
identifier[_LOGGER] . identifier[debug] ( literal[string] )
identifier[login_post] ={ literal[string] : literal[string] , literal[string] : literal[string] }
keyword[if] identifier[self] . identifier[_username] :
identifier[login_post] [ literal[string] ]= identifier[self] . identifier[_username]
keyword[if] identifier[self] . identifier[_password] :
identifier[login_post] [ literal[string] ]= identifier[self] . identifier[_password]
identifier[req] = identifier[requests] . identifier[post] ( identifier[urljoin] ( identifier[self] . identifier[_server_url] , literal[string] ),
identifier[data] = identifier[login_post] , identifier[verify] = identifier[self] . identifier[_verify_ssl] )
identifier[self] . identifier[_cookies] = identifier[req] . identifier[cookies]
identifier[req] = identifier[requests] . identifier[get] (
identifier[urljoin] ( identifier[self] . identifier[_server_url] , literal[string] ),
identifier[cookies] = identifier[self] . identifier[_cookies] ,
identifier[timeout] = identifier[ZoneMinder] . identifier[DEFAULT_TIMEOUT] ,
identifier[verify] = identifier[self] . identifier[_verify_ssl] )
keyword[if] keyword[not] identifier[req] . identifier[ok] :
identifier[_LOGGER] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def login(self):
"""Login to the ZoneMinder API."""
_LOGGER.debug('Attempting to login to ZoneMinder')
login_post = {'view': 'console', 'action': 'login'}
if self._username:
login_post['username'] = self._username # depends on [control=['if'], data=[]]
if self._password:
login_post['password'] = self._password # depends on [control=['if'], data=[]]
req = requests.post(urljoin(self._server_url, 'index.php'), data=login_post, verify=self._verify_ssl)
self._cookies = req.cookies
    # Login calls return a 200 response on both failure and success.
    # The only way to tell if you logged in correctly is to issue an API
    # call.
req = requests.get(urljoin(self._server_url, 'api/host/getVersion.json'), cookies=self._cookies, timeout=ZoneMinder.DEFAULT_TIMEOUT, verify=self._verify_ssl)
if not req.ok:
_LOGGER.error('Connection error logging into ZoneMinder')
return False # depends on [control=['if'], data=[]]
return True
|
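A hedged sketch of driving login() above; the constructor signature is an assumption, not taken from this row:
    zm = ZoneMinder('https://zoneminder.example.com/zm/',
                    username='admin', password='secret', verify_ssl=True)
    if not zm.login():
        raise RuntimeError('ZoneMinder login failed')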
def createWindow(self,cls=None,caption_t=None,*args,**kwargs):
"""
createWindow(cls=window.PengWindow, *args, **kwargs)
Creates a new window using the supplied ``cls``\ .
If ``cls`` is not given, :py:class:`peng3d.window.PengWindow()` will be used.
Any other positional or keyword arguments are passed to the class constructor.
Note that this method currently does not support using multiple windows.
.. todo::
Implement having multiple windows.
"""
if cls is None:
from . import window
cls = window.PengWindow
if self.window is not None:
raise RuntimeError("Window already created!")
self.sendEvent("peng3d:window.create.pre",{"peng":self,"cls":cls})
if caption_t is not None:
kwargs["caption"] = "Peng3d Application"
self.window = cls(self,*args,**kwargs)
self.sendEvent("peng3d:window.create.post",{"peng":self,"window":self.window})
if self.cfg["rsrc.enable"] and self.resourceMgr is None:
self.sendEvent("peng3d:rsrc.init.pre",{"peng":self,"basepath":self.cfg["rsrc.basepath"]})
self.resourceMgr = resource.ResourceManager(self,self.cfg["rsrc.basepath"])
self.rsrcMgr = self.resourceMgr
self.sendEvent("peng3d:rsrc.init.post",{"peng":self,"rsrcMgr":self.resourceMgr})
if self.cfg["i18n.enable"] and self.i18n is None:
self.sendEvent("peng3d:i18n.init.pre",{"peng":self})
self.i18n = i18n.TranslationManager(self)
self._t = self.i18n.t
self._tl = self.i18n.tl
self.sendEvent("peng3d:i18n.init.post",{"peng":self,"i18n":self.i18n})
if caption_t is not None:
self.window.set_caption(self.t(caption_t))
def f():
self.window.set_caption(self.t(caption_t))
self.i18n.addAction("setlang",f)
return self.window
|
def function[createWindow, parameter[self, cls, caption_t]]:
constant[
createWindow(cls=window.PengWindow, *args, **kwargs)
Creates a new window using the supplied ``cls``\ .
If ``cls`` is not given, :py:class:`peng3d.window.PengWindow()` will be used.
Any other positional or keyword arguments are passed to the class constructor.
Note that this method currently does not support using multiple windows.
.. todo::
Implement having multiple windows.
]
if compare[name[cls] is constant[None]] begin[:]
from relative_module[None] import module[window]
variable[cls] assign[=] name[window].PengWindow
if compare[name[self].window is_not constant[None]] begin[:]
<ast.Raise object at 0x7da1b01690c0>
call[name[self].sendEvent, parameter[constant[peng3d:window.create.pre], dictionary[[<ast.Constant object at 0x7da1b0163b80>, <ast.Constant object at 0x7da1b0163790>], [<ast.Name object at 0x7da1b0163700>, <ast.Name object at 0x7da1b0163d30>]]]]
if compare[name[caption_t] is_not constant[None]] begin[:]
call[name[kwargs]][constant[caption]] assign[=] constant[Peng3d Application]
name[self].window assign[=] call[name[cls], parameter[name[self], <ast.Starred object at 0x7da1b0163970>]]
call[name[self].sendEvent, parameter[constant[peng3d:window.create.post], dictionary[[<ast.Constant object at 0x7da1b0163c40>, <ast.Constant object at 0x7da1b0163d60>], [<ast.Name object at 0x7da1b0163fd0>, <ast.Attribute object at 0x7da1b0163df0>]]]]
if <ast.BoolOp object at 0x7da1b0163bb0> begin[:]
call[name[self].sendEvent, parameter[constant[peng3d:rsrc.init.pre], dictionary[[<ast.Constant object at 0x7da1b01ff580>, <ast.Constant object at 0x7da1b01ffb20>], [<ast.Name object at 0x7da1b01fd750>, <ast.Subscript object at 0x7da1b01fdae0>]]]]
name[self].resourceMgr assign[=] call[name[resource].ResourceManager, parameter[name[self], call[name[self].cfg][constant[rsrc.basepath]]]]
name[self].rsrcMgr assign[=] name[self].resourceMgr
call[name[self].sendEvent, parameter[constant[peng3d:rsrc.init.post], dictionary[[<ast.Constant object at 0x7da1b02b9f60>, <ast.Constant object at 0x7da1b02b8e20>], [<ast.Name object at 0x7da1b02b8130>, <ast.Attribute object at 0x7da1b02b9960>]]]]
if <ast.BoolOp object at 0x7da1b02b92a0> begin[:]
call[name[self].sendEvent, parameter[constant[peng3d:i18n.init.pre], dictionary[[<ast.Constant object at 0x7da1b02108e0>], [<ast.Name object at 0x7da1b02138b0>]]]]
name[self].i18n assign[=] call[name[i18n].TranslationManager, parameter[name[self]]]
name[self]._t assign[=] name[self].i18n.t
name[self]._tl assign[=] name[self].i18n.tl
call[name[self].sendEvent, parameter[constant[peng3d:i18n.init.post], dictionary[[<ast.Constant object at 0x7da1b0213880>, <ast.Constant object at 0x7da1b0213760>], [<ast.Name object at 0x7da1b0213a90>, <ast.Attribute object at 0x7da1b0213d60>]]]]
if compare[name[caption_t] is_not constant[None]] begin[:]
call[name[self].window.set_caption, parameter[call[name[self].t, parameter[name[caption_t]]]]]
def function[f, parameter[]]:
call[name[self].window.set_caption, parameter[call[name[self].t, parameter[name[caption_t]]]]]
call[name[self].i18n.addAction, parameter[constant[setlang], name[f]]]
return[name[self].window]
|
keyword[def] identifier[createWindow] ( identifier[self] , identifier[cls] = keyword[None] , identifier[caption_t] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[cls] keyword[is] keyword[None] :
keyword[from] . keyword[import] identifier[window]
identifier[cls] = identifier[window] . identifier[PengWindow]
keyword[if] identifier[self] . identifier[window] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] , literal[string] : identifier[cls] })
keyword[if] identifier[caption_t] keyword[is] keyword[not] keyword[None] :
identifier[kwargs] [ literal[string] ]= literal[string]
identifier[self] . identifier[window] = identifier[cls] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] , literal[string] : identifier[self] . identifier[window] })
keyword[if] identifier[self] . identifier[cfg] [ literal[string] ] keyword[and] identifier[self] . identifier[resourceMgr] keyword[is] keyword[None] :
identifier[self] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] , literal[string] : identifier[self] . identifier[cfg] [ literal[string] ]})
identifier[self] . identifier[resourceMgr] = identifier[resource] . identifier[ResourceManager] ( identifier[self] , identifier[self] . identifier[cfg] [ literal[string] ])
identifier[self] . identifier[rsrcMgr] = identifier[self] . identifier[resourceMgr]
identifier[self] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] , literal[string] : identifier[self] . identifier[resourceMgr] })
keyword[if] identifier[self] . identifier[cfg] [ literal[string] ] keyword[and] identifier[self] . identifier[i18n] keyword[is] keyword[None] :
identifier[self] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] })
identifier[self] . identifier[i18n] = identifier[i18n] . identifier[TranslationManager] ( identifier[self] )
identifier[self] . identifier[_t] = identifier[self] . identifier[i18n] . identifier[t]
identifier[self] . identifier[_tl] = identifier[self] . identifier[i18n] . identifier[tl]
identifier[self] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] , literal[string] : identifier[self] . identifier[i18n] })
keyword[if] identifier[caption_t] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[window] . identifier[set_caption] ( identifier[self] . identifier[t] ( identifier[caption_t] ))
keyword[def] identifier[f] ():
identifier[self] . identifier[window] . identifier[set_caption] ( identifier[self] . identifier[t] ( identifier[caption_t] ))
identifier[self] . identifier[i18n] . identifier[addAction] ( literal[string] , identifier[f] )
keyword[return] identifier[self] . identifier[window]
|
def createWindow(self, cls=None, caption_t=None, *args, **kwargs):
"""
createWindow(cls=window.PengWindow, *args, **kwargs)
Creates a new window using the supplied ``cls``\\ .
If ``cls`` is not given, :py:class:`peng3d.window.PengWindow()` will be used.
Any other positional or keyword arguments are passed to the class constructor.
Note that this method currently does not support using multiple windows.
.. todo::
Implement having multiple windows.
"""
if cls is None:
from . import window
cls = window.PengWindow # depends on [control=['if'], data=['cls']]
if self.window is not None:
raise RuntimeError('Window already created!') # depends on [control=['if'], data=[]]
self.sendEvent('peng3d:window.create.pre', {'peng': self, 'cls': cls})
if caption_t is not None:
kwargs['caption'] = 'Peng3d Application' # depends on [control=['if'], data=[]]
self.window = cls(self, *args, **kwargs)
self.sendEvent('peng3d:window.create.post', {'peng': self, 'window': self.window})
if self.cfg['rsrc.enable'] and self.resourceMgr is None:
self.sendEvent('peng3d:rsrc.init.pre', {'peng': self, 'basepath': self.cfg['rsrc.basepath']})
self.resourceMgr = resource.ResourceManager(self, self.cfg['rsrc.basepath'])
self.rsrcMgr = self.resourceMgr
self.sendEvent('peng3d:rsrc.init.post', {'peng': self, 'rsrcMgr': self.resourceMgr})
if self.cfg['i18n.enable'] and self.i18n is None:
self.sendEvent('peng3d:i18n.init.pre', {'peng': self})
self.i18n = i18n.TranslationManager(self)
self._t = self.i18n.t
self._tl = self.i18n.tl
self.sendEvent('peng3d:i18n.init.post', {'peng': self, 'i18n': self.i18n}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if caption_t is not None:
self.window.set_caption(self.t(caption_t))
def f():
self.window.set_caption(self.t(caption_t))
self.i18n.addAction('setlang', f) # depends on [control=['if'], data=['caption_t']]
return self.window
|
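A minimal peng3d-style sketch for createWindow() above (the Peng entry point and the run() call are assumptions based on the library's usual shape):
    import peng3d

    peng = peng3d.Peng()
    window = peng.createWindow(caption_t="gui:window.caption", resizable=True)
    peng.run()   # hand control to the event loop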
def get_keywords_from_local_file(
local_file, taxonomy_name, output_mode="text",
output_limit=None, spires=False,
match_mode="full", no_cache=False, with_author_keywords=False,
rebuild_cache=False, only_core_tags=False, extract_acronyms=False):
"""Output keywords reading a local file.
Arguments and output are the same as for :see: get_keywords_from_text().
"""
if output_limit is None:
output_limit = current_app.config['CLASSIFIER_DEFAULT_OUTPUT_NUMBER']
current_app.logger.info(
"Analyzing keywords for local file %s." % local_file)
text_lines = text_lines_from_local_file(local_file)
return get_keywords_from_text(text_lines,
taxonomy_name,
output_mode=output_mode,
output_limit=output_limit,
spires=spires,
match_mode=match_mode,
no_cache=no_cache,
with_author_keywords=with_author_keywords,
rebuild_cache=rebuild_cache,
only_core_tags=only_core_tags,
extract_acronyms=extract_acronyms)
|
def function[get_keywords_from_local_file, parameter[local_file, taxonomy_name, output_mode, output_limit, spires, match_mode, no_cache, with_author_keywords, rebuild_cache, only_core_tags, extract_acronyms]]:
    constant[Extract keywords from a local file.
    Arguments and output are the same as for :func:`get_keywords_from_text`.
]
if compare[name[output_limit] is constant[None]] begin[:]
variable[output_limit] assign[=] call[name[current_app].config][constant[CLASSIFIER_DEFAULT_OUTPUT_NUMBER]]
call[name[current_app].logger.info, parameter[binary_operation[constant[Analyzing keywords for local file %s.] <ast.Mod object at 0x7da2590d6920> name[local_file]]]]
variable[text_lines] assign[=] call[name[text_lines_from_local_file], parameter[name[local_file]]]
return[call[name[get_keywords_from_text], parameter[name[text_lines], name[taxonomy_name]]]]
|
keyword[def] identifier[get_keywords_from_local_file] (
identifier[local_file] , identifier[taxonomy_name] , identifier[output_mode] = literal[string] ,
identifier[output_limit] = keyword[None] , identifier[spires] = keyword[False] ,
identifier[match_mode] = literal[string] , identifier[no_cache] = keyword[False] , identifier[with_author_keywords] = keyword[False] ,
identifier[rebuild_cache] = keyword[False] , identifier[only_core_tags] = keyword[False] , identifier[extract_acronyms] = keyword[False] ):
literal[string]
keyword[if] identifier[output_limit] keyword[is] keyword[None] :
identifier[output_limit] = identifier[current_app] . identifier[config] [ literal[string] ]
identifier[current_app] . identifier[logger] . identifier[info] (
literal[string] % identifier[local_file] )
identifier[text_lines] = identifier[text_lines_from_local_file] ( identifier[local_file] )
keyword[return] identifier[get_keywords_from_text] ( identifier[text_lines] ,
identifier[taxonomy_name] ,
identifier[output_mode] = identifier[output_mode] ,
identifier[output_limit] = identifier[output_limit] ,
identifier[spires] = identifier[spires] ,
identifier[match_mode] = identifier[match_mode] ,
identifier[no_cache] = identifier[no_cache] ,
identifier[with_author_keywords] = identifier[with_author_keywords] ,
identifier[rebuild_cache] = identifier[rebuild_cache] ,
identifier[only_core_tags] = identifier[only_core_tags] ,
identifier[extract_acronyms] = identifier[extract_acronyms] )
|
def get_keywords_from_local_file(local_file, taxonomy_name, output_mode='text', output_limit=None, spires=False, match_mode='full', no_cache=False, with_author_keywords=False, rebuild_cache=False, only_core_tags=False, extract_acronyms=False):
"""Output keywords reading a local file.
Arguments and output are the same as for :see: get_keywords_from_text().
"""
if output_limit is None:
output_limit = current_app.config['CLASSIFIER_DEFAULT_OUTPUT_NUMBER'] # depends on [control=['if'], data=['output_limit']]
current_app.logger.info('Analyzing keywords for local file %s.' % local_file)
text_lines = text_lines_from_local_file(local_file)
return get_keywords_from_text(text_lines, taxonomy_name, output_mode=output_mode, output_limit=output_limit, spires=spires, match_mode=match_mode, no_cache=no_cache, with_author_keywords=with_author_keywords, rebuild_cache=rebuild_cache, only_core_tags=only_core_tags, extract_acronyms=extract_acronyms)
|
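An illustrative call for the function above; the file path and taxonomy name are assumptions:
    keywords = get_keywords_from_local_file(
        '/tmp/preprint.pdf', 'HEP.rdf',
        output_mode='dict', output_limit=20, with_author_keywords=True)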
def overlay(repository, files, version, debug=False):
"""
Overlay files from the specified repository/version into the given
directory and return None.
:param repository: A string containing the path to the repository to be
extracted.
:param files: A list of `FileConfig` objects.
:param version: A string containing the branch/tag/sha to be exported.
:param debug: An optional bool to toggle debug output.
:return: None
"""
with util.saved_cwd():
os.chdir(repository)
_get_version(version, debug)
for fc in files:
if '*' in fc.src:
for filename in glob.glob(fc.src):
util.copy(filename, fc.dst)
msg = ' - copied ({}) {} to {}'.format(
version, filename, fc.dst)
util.print_info(msg)
else:
if os.path.isdir(fc.dst) and os.path.isdir(fc.src):
shutil.rmtree(fc.dst)
util.copy(fc.src, fc.dst)
msg = ' - copied ({}) {} to {}'.format(
version, fc.src, fc.dst)
util.print_info(msg)
|
def function[overlay, parameter[repository, files, version, debug]]:
constant[
Overlay files from the specified repository/version into the given
directory and return None.
:param repository: A string containing the path to the repository to be
extracted.
:param files: A list of `FileConfig` objects.
:param version: A string containing the branch/tag/sha to be exported.
:param debug: An optional bool to toggle debug output.
:return: None
]
with call[name[util].saved_cwd, parameter[]] begin[:]
call[name[os].chdir, parameter[name[repository]]]
call[name[_get_version], parameter[name[version], name[debug]]]
for taget[name[fc]] in starred[name[files]] begin[:]
if compare[constant[*] in name[fc].src] begin[:]
for taget[name[filename]] in starred[call[name[glob].glob, parameter[name[fc].src]]] begin[:]
call[name[util].copy, parameter[name[filename], name[fc].dst]]
variable[msg] assign[=] call[constant[ - copied ({}) {} to {}].format, parameter[name[version], name[filename], name[fc].dst]]
call[name[util].print_info, parameter[name[msg]]]
|
keyword[def] identifier[overlay] ( identifier[repository] , identifier[files] , identifier[version] , identifier[debug] = keyword[False] ):
literal[string]
keyword[with] identifier[util] . identifier[saved_cwd] ():
identifier[os] . identifier[chdir] ( identifier[repository] )
identifier[_get_version] ( identifier[version] , identifier[debug] )
keyword[for] identifier[fc] keyword[in] identifier[files] :
keyword[if] literal[string] keyword[in] identifier[fc] . identifier[src] :
keyword[for] identifier[filename] keyword[in] identifier[glob] . identifier[glob] ( identifier[fc] . identifier[src] ):
identifier[util] . identifier[copy] ( identifier[filename] , identifier[fc] . identifier[dst] )
identifier[msg] = literal[string] . identifier[format] (
identifier[version] , identifier[filename] , identifier[fc] . identifier[dst] )
identifier[util] . identifier[print_info] ( identifier[msg] )
keyword[else] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[fc] . identifier[dst] ) keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[fc] . identifier[src] ):
identifier[shutil] . identifier[rmtree] ( identifier[fc] . identifier[dst] )
identifier[util] . identifier[copy] ( identifier[fc] . identifier[src] , identifier[fc] . identifier[dst] )
identifier[msg] = literal[string] . identifier[format] (
identifier[version] , identifier[fc] . identifier[src] , identifier[fc] . identifier[dst] )
identifier[util] . identifier[print_info] ( identifier[msg] )
|
def overlay(repository, files, version, debug=False):
"""
Overlay files from the specified repository/version into the given
directory and return None.
:param repository: A string containing the path to the repository to be
extracted.
:param files: A list of `FileConfig` objects.
:param version: A string containing the branch/tag/sha to be exported.
:param debug: An optional bool to toggle debug output.
:return: None
"""
with util.saved_cwd():
os.chdir(repository)
_get_version(version, debug)
for fc in files:
if '*' in fc.src:
for filename in glob.glob(fc.src):
util.copy(filename, fc.dst)
msg = ' - copied ({}) {} to {}'.format(version, filename, fc.dst)
util.print_info(msg) # depends on [control=['for'], data=['filename']] # depends on [control=['if'], data=[]]
else:
if os.path.isdir(fc.dst) and os.path.isdir(fc.src):
shutil.rmtree(fc.dst) # depends on [control=['if'], data=[]]
util.copy(fc.src, fc.dst)
msg = ' - copied ({}) {} to {}'.format(version, fc.src, fc.dst)
util.print_info(msg) # depends on [control=['for'], data=['fc']] # depends on [control=['with'], data=[]]
|
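A minimal usage sketch for `overlay` above. `FileConfig` here is a hypothetical stand-in for the real config object (only its `src` and `dst` attributes are exercised by the function), and the paths are illustrative:

# Hypothetical illustration only: FileConfig is assumed, not taken from this module.
from collections import namedtuple

FileConfig = namedtuple('FileConfig', ['src', 'dst'])

configs = [
    FileConfig(src='templates/*.yml', dst='/etc/app/'),    # exercises the glob branch
    FileConfig(src='defaults', dst='/etc/app/defaults'),   # exercises the directory branch
]
overlay('/tmp/repo-checkout', configs, version='v1.2.3', debug=False)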
def _set_last_rcvd_interface(self, v, load=False):
"""
Setter method for last_rcvd_interface, mapped from YANG variable /brocade_interface_ext_rpc/get_interface_detail/input/last_rcvd_interface (container)
If this variable is read-only (config: false) in the
    source YANG file, then _set_last_rcvd_interface is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_last_rcvd_interface() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=last_rcvd_interface.last_rcvd_interface, is_container='container', presence=False, yang_name="last-rcvd-interface", rest_name="last-rcvd-interface", parent=self, choice=(u'request-type', u'get-next-request'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """last_rcvd_interface must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=last_rcvd_interface.last_rcvd_interface, is_container='container', presence=False, yang_name="last-rcvd-interface", rest_name="last-rcvd-interface", parent=self, choice=(u'request-type', u'get-next-request'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True)""",
})
self.__last_rcvd_interface = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_last_rcvd_interface, parameter[self, v, load]]:
constant[
Setter method for last_rcvd_interface, mapped from YANG variable /brocade_interface_ext_rpc/get_interface_detail/input/last_rcvd_interface (container)
If this variable is read-only (config: false) in the
    source YANG file, then _set_last_rcvd_interface is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_last_rcvd_interface() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da20c6c6680>
name[self].__last_rcvd_interface assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_last_rcvd_interface] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[last_rcvd_interface] . identifier[last_rcvd_interface] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[choice] =( literal[string] , literal[string] ), identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] = keyword[None] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__last_rcvd_interface] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_last_rcvd_interface(self, v, load=False):
"""
Setter method for last_rcvd_interface, mapped from YANG variable /brocade_interface_ext_rpc/get_interface_detail/input/last_rcvd_interface (container)
If this variable is read-only (config: false) in the
    source YANG file, then _set_last_rcvd_interface is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_last_rcvd_interface() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=last_rcvd_interface.last_rcvd_interface, is_container='container', presence=False, yang_name='last-rcvd-interface', rest_name='last-rcvd-interface', parent=self, choice=(u'request-type', u'get-next-request'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'last_rcvd_interface must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=last_rcvd_interface.last_rcvd_interface, is_container=\'container\', presence=False, yang_name="last-rcvd-interface", rest_name="last-rcvd-interface", parent=self, choice=(u\'request-type\', u\'get-next-request\'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace=\'urn:brocade.com:mgmt:brocade-interface-ext\', defining_module=\'brocade-interface-ext\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__last_rcvd_interface = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
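Per the docstring, backends populate this leaf by calling the private setter directly; a hedged sketch, assuming the pyangbind-style class path given in the `base=` argument above is importable as written:

# Assumed usage; `rpc_input` stands for an instance of the generated input class.
container = last_rcvd_interface.last_rcvd_interface()
rpc_input._set_last_rcvd_interface(container)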
def base(number, input_base=10, output_base=10, max_depth=10,
string=False, recurring=True):
"""
    Converts a number from any base to any other.
Args:
number(tuple|str|int): The number to convert.
        input_base(int): The base to convert from (default 10).
        output_base(int): The base to convert to (default 10).
        max_depth(int): The maximum number of fractional digits (default 10).
        string(bool): If True output will be in string representation,
        if False output will be in tuple representation (default False).
recurring(bool): Attempt to find repeating digits in the fractional
part of a number. Repeated digits will be enclosed with "[" and "]"
(default True).
Returns:
A tuple of digits in the specified base:
(int, int, int, ... , '.' , int, int, int)
If the string flag is set to True,
a string representation will be used instead.
Raises:
ValueError if a digit value is too high for the input_base.
Example:
>>> base((1,9,6,'.',5,1,6), 17, 20)
(1, 2, 8, '.', 5, 19, 10, 7, 17, 2, 13, 13, 1, 8)
"""
# Convert number to tuple representation.
if type(number) == int or type(number) == float:
number = str(number)
if type(number) == str:
number = represent_as_tuple(number)
# Check that the number is valid for the input base.
if not check_valid(number, input_base):
raise ValueError
# Deal with base-1 special case
if input_base == 1:
number = (1,) * number.count(1)
# Expand any recurring digits.
number = expand_recurring(number, repeat=5)
# Convert a fractional number.
if "." in number:
radix_point = number.index(".")
integer_part = number[:radix_point]
fractional_part = number[radix_point:]
integer_part = integer_base(integer_part, input_base, output_base)
fractional_part = fractional_base(fractional_part, input_base,
output_base, max_depth)
number = integer_part + fractional_part
number = truncate(number)
# Convert an integer number.
else:
number = integer_base(number, input_base, output_base)
if recurring:
number = find_recurring(number, min_repeat=2)
    # Return the converted number as a string or tuple.
return represent_as_string(number) if string else number
|
def function[base, parameter[number, input_base, output_base, max_depth, string, recurring]]:
constant[
    Converts a number from any base to any other.
Args:
number(tuple|str|int): The number to convert.
        input_base(int): The base to convert from (default 10).
        output_base(int): The base to convert to (default 10).
        max_depth(int): The maximum number of fractional digits (default 10).
        string(bool): If True output will be in string representation,
        if False output will be in tuple representation (default False).
recurring(bool): Attempt to find repeating digits in the fractional
part of a number. Repeated digits will be enclosed with "[" and "]"
(default True).
Returns:
A tuple of digits in the specified base:
(int, int, int, ... , '.' , int, int, int)
If the string flag is set to True,
a string representation will be used instead.
Raises:
ValueError if a digit value is too high for the input_base.
Example:
>>> base((1,9,6,'.',5,1,6), 17, 20)
(1, 2, 8, '.', 5, 19, 10, 7, 17, 2, 13, 13, 1, 8)
]
if <ast.BoolOp object at 0x7da1b0b646d0> begin[:]
variable[number] assign[=] call[name[str], parameter[name[number]]]
if compare[call[name[type], parameter[name[number]]] equal[==] name[str]] begin[:]
variable[number] assign[=] call[name[represent_as_tuple], parameter[name[number]]]
if <ast.UnaryOp object at 0x7da1b0b673d0> begin[:]
<ast.Raise object at 0x7da1b0b65b70>
if compare[name[input_base] equal[==] constant[1]] begin[:]
variable[number] assign[=] binary_operation[tuple[[<ast.Constant object at 0x7da1b0b64a30>]] * call[name[number].count, parameter[constant[1]]]]
variable[number] assign[=] call[name[expand_recurring], parameter[name[number]]]
if compare[constant[.] in name[number]] begin[:]
variable[radix_point] assign[=] call[name[number].index, parameter[constant[.]]]
variable[integer_part] assign[=] call[name[number]][<ast.Slice object at 0x7da1b0b66fb0>]
variable[fractional_part] assign[=] call[name[number]][<ast.Slice object at 0x7da1b0b677c0>]
variable[integer_part] assign[=] call[name[integer_base], parameter[name[integer_part], name[input_base], name[output_base]]]
variable[fractional_part] assign[=] call[name[fractional_base], parameter[name[fractional_part], name[input_base], name[output_base], name[max_depth]]]
variable[number] assign[=] binary_operation[name[integer_part] + name[fractional_part]]
variable[number] assign[=] call[name[truncate], parameter[name[number]]]
if name[recurring] begin[:]
variable[number] assign[=] call[name[find_recurring], parameter[name[number]]]
return[<ast.IfExp object at 0x7da1b0b653f0>]
|
keyword[def] identifier[base] ( identifier[number] , identifier[input_base] = literal[int] , identifier[output_base] = literal[int] , identifier[max_depth] = literal[int] ,
identifier[string] = keyword[False] , identifier[recurring] = keyword[True] ):
literal[string]
keyword[if] identifier[type] ( identifier[number] )== identifier[int] keyword[or] identifier[type] ( identifier[number] )== identifier[float] :
identifier[number] = identifier[str] ( identifier[number] )
keyword[if] identifier[type] ( identifier[number] )== identifier[str] :
identifier[number] = identifier[represent_as_tuple] ( identifier[number] )
keyword[if] keyword[not] identifier[check_valid] ( identifier[number] , identifier[input_base] ):
keyword[raise] identifier[ValueError]
keyword[if] identifier[input_base] == literal[int] :
identifier[number] =( literal[int] ,)* identifier[number] . identifier[count] ( literal[int] )
identifier[number] = identifier[expand_recurring] ( identifier[number] , identifier[repeat] = literal[int] )
keyword[if] literal[string] keyword[in] identifier[number] :
identifier[radix_point] = identifier[number] . identifier[index] ( literal[string] )
identifier[integer_part] = identifier[number] [: identifier[radix_point] ]
identifier[fractional_part] = identifier[number] [ identifier[radix_point] :]
identifier[integer_part] = identifier[integer_base] ( identifier[integer_part] , identifier[input_base] , identifier[output_base] )
identifier[fractional_part] = identifier[fractional_base] ( identifier[fractional_part] , identifier[input_base] ,
identifier[output_base] , identifier[max_depth] )
identifier[number] = identifier[integer_part] + identifier[fractional_part]
identifier[number] = identifier[truncate] ( identifier[number] )
keyword[else] :
identifier[number] = identifier[integer_base] ( identifier[number] , identifier[input_base] , identifier[output_base] )
keyword[if] identifier[recurring] :
identifier[number] = identifier[find_recurring] ( identifier[number] , identifier[min_repeat] = literal[int] )
keyword[return] identifier[represent_as_string] ( identifier[number] ) keyword[if] identifier[string] keyword[else] identifier[number]
|
def base(number, input_base=10, output_base=10, max_depth=10, string=False, recurring=True):
"""
    Converts a number from any base to any other.
Args:
number(tuple|str|int): The number to convert.
        input_base(int): The base to convert from (default 10).
        output_base(int): The base to convert to (default 10).
        max_depth(int): The maximum number of fractional digits (default 10).
        string(bool): If True output will be in string representation,
        if False output will be in tuple representation (default False).
recurring(bool): Attempt to find repeating digits in the fractional
part of a number. Repeated digits will be enclosed with "[" and "]"
(default True).
Returns:
A tuple of digits in the specified base:
(int, int, int, ... , '.' , int, int, int)
If the string flag is set to True,
a string representation will be used instead.
Raises:
ValueError if a digit value is too high for the input_base.
Example:
>>> base((1,9,6,'.',5,1,6), 17, 20)
(1, 2, 8, '.', 5, 19, 10, 7, 17, 2, 13, 13, 1, 8)
""" # Convert number to tuple representation.
if type(number) == int or type(number) == float:
number = str(number) # depends on [control=['if'], data=[]]
if type(number) == str:
number = represent_as_tuple(number) # depends on [control=['if'], data=[]] # Check that the number is valid for the input base.
if not check_valid(number, input_base):
raise ValueError # depends on [control=['if'], data=[]] # Deal with base-1 special case
if input_base == 1:
number = (1,) * number.count(1) # depends on [control=['if'], data=[]] # Expand any recurring digits.
number = expand_recurring(number, repeat=5) # Convert a fractional number.
if '.' in number:
radix_point = number.index('.')
integer_part = number[:radix_point]
fractional_part = number[radix_point:]
integer_part = integer_base(integer_part, input_base, output_base)
fractional_part = fractional_base(fractional_part, input_base, output_base, max_depth)
number = integer_part + fractional_part
number = truncate(number) # depends on [control=['if'], data=['number']]
else: # Convert an integer number.
number = integer_base(number, input_base, output_base)
if recurring:
        number = find_recurring(number, min_repeat=2) # depends on [control=['if'], data=[]] # Return the converted number as a string or tuple.
return represent_as_string(number) if string else number
|
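Worked calls for `base`, assuming the helper functions it references (`represent_as_tuple`, `integer_base`, `fractional_base`, ...) are defined in the same module. The first call reproduces the docstring's own example; the string-form output is indicative, since the exact digit-to-character mapping lives in `represent_as_string`:

# From the docstring: base 17 -> base 20, fractional part included.
print(base((1, 9, 6, '.', 5, 1, 6), 17, 20))
# -> (1, 2, 8, '.', 5, 19, 10, 7, 17, 2, 13, 13, 1, 8)

# Binary to decimal with a radix point; exact string form depends on
# represent_as_string's digit mapping.
print(base('101.01', input_base=2, output_base=10, string=True))   # expected '5.25'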
def pseudo_inv_components(self,maxsing=None,eigthresh=1.0e-5,truncate=True):
""" Get the (optionally) truncated SVD components
Parameters
----------
maxsing : int
the number of singular components to use. If None,
maxsing is calculated using Matrix.get_maxsing() and eigthresh
eigthresh : float
the ratio of largest to smallest singular components to use
for truncation. Ignored if maxsing is not None
truncate : bool
flag to truncate components. If False, U, s, and V will be zeroed out instead of truncated.
Default is True
Returns
-------
u : Matrix
(optionally) truncated left singular vectors
s : Matrix
(optionally) truncated singular value matrix
v : Matrix
(optionally) truncated right singular vectors
"""
if maxsing is None:
maxsing = self.get_maxsing(eigthresh=eigthresh)
else:
maxsing = min(self.get_maxsing(eigthresh=eigthresh),maxsing)
s = self.full_s.copy()
v = self.v.copy()
u = self.u.copy()
if truncate:
s = s[:maxsing,:maxsing]
v = v[:,:maxsing]
u = u[:,:maxsing]
else:
new_s = self.full_s.copy()
s = new_s
s.x[maxsing:, maxsing:] = 0.0
v.x[:, maxsing:] = 0.0
u.x[:, maxsing:] = 0.0
return u,s,v
|
def function[pseudo_inv_components, parameter[self, maxsing, eigthresh, truncate]]:
constant[ Get the (optionally) truncated SVD components
Parameters
----------
maxsing : int
the number of singular components to use. If None,
maxsing is calculated using Matrix.get_maxsing() and eigthresh
eigthresh : float
the ratio of largest to smallest singular components to use
for truncation. Ignored if maxsing is not None
truncate : bool
flag to truncate components. If False, U, s, and V will be zeroed out instead of truncated.
Default is True
Returns
-------
u : Matrix
(optionally) truncated left singular vectors
s : Matrix
(optionally) truncated singular value matrix
v : Matrix
(optionally) truncated right singular vectors
]
if compare[name[maxsing] is constant[None]] begin[:]
variable[maxsing] assign[=] call[name[self].get_maxsing, parameter[]]
variable[s] assign[=] call[name[self].full_s.copy, parameter[]]
variable[v] assign[=] call[name[self].v.copy, parameter[]]
variable[u] assign[=] call[name[self].u.copy, parameter[]]
if name[truncate] begin[:]
variable[s] assign[=] call[name[s]][tuple[[<ast.Slice object at 0x7da1b230a5c0>, <ast.Slice object at 0x7da1b23098d0>]]]
variable[v] assign[=] call[name[v]][tuple[[<ast.Slice object at 0x7da1b230a500>, <ast.Slice object at 0x7da1b230b9a0>]]]
variable[u] assign[=] call[name[u]][tuple[[<ast.Slice object at 0x7da1b230a080>, <ast.Slice object at 0x7da1b2309180>]]]
return[tuple[[<ast.Name object at 0x7da1b244ffd0>, <ast.Name object at 0x7da1b244f1f0>, <ast.Name object at 0x7da1b244e080>]]]
|
keyword[def] identifier[pseudo_inv_components] ( identifier[self] , identifier[maxsing] = keyword[None] , identifier[eigthresh] = literal[int] , identifier[truncate] = keyword[True] ):
literal[string]
keyword[if] identifier[maxsing] keyword[is] keyword[None] :
identifier[maxsing] = identifier[self] . identifier[get_maxsing] ( identifier[eigthresh] = identifier[eigthresh] )
keyword[else] :
identifier[maxsing] = identifier[min] ( identifier[self] . identifier[get_maxsing] ( identifier[eigthresh] = identifier[eigthresh] ), identifier[maxsing] )
identifier[s] = identifier[self] . identifier[full_s] . identifier[copy] ()
identifier[v] = identifier[self] . identifier[v] . identifier[copy] ()
identifier[u] = identifier[self] . identifier[u] . identifier[copy] ()
keyword[if] identifier[truncate] :
identifier[s] = identifier[s] [: identifier[maxsing] ,: identifier[maxsing] ]
identifier[v] = identifier[v] [:,: identifier[maxsing] ]
identifier[u] = identifier[u] [:,: identifier[maxsing] ]
keyword[else] :
identifier[new_s] = identifier[self] . identifier[full_s] . identifier[copy] ()
identifier[s] = identifier[new_s]
identifier[s] . identifier[x] [ identifier[maxsing] :, identifier[maxsing] :]= literal[int]
identifier[v] . identifier[x] [:, identifier[maxsing] :]= literal[int]
identifier[u] . identifier[x] [:, identifier[maxsing] :]= literal[int]
keyword[return] identifier[u] , identifier[s] , identifier[v]
|
def pseudo_inv_components(self, maxsing=None, eigthresh=1e-05, truncate=True):
""" Get the (optionally) truncated SVD components
Parameters
----------
maxsing : int
the number of singular components to use. If None,
maxsing is calculated using Matrix.get_maxsing() and eigthresh
eigthresh : float
the ratio of largest to smallest singular components to use
for truncation. Ignored if maxsing is not None
truncate : bool
flag to truncate components. If False, U, s, and V will be zeroed out instead of truncated.
Default is True
Returns
-------
u : Matrix
(optionally) truncated left singular vectors
s : Matrix
(optionally) truncated singular value matrix
v : Matrix
(optionally) truncated right singular vectors
"""
if maxsing is None:
maxsing = self.get_maxsing(eigthresh=eigthresh) # depends on [control=['if'], data=['maxsing']]
else:
maxsing = min(self.get_maxsing(eigthresh=eigthresh), maxsing)
s = self.full_s.copy()
v = self.v.copy()
u = self.u.copy()
if truncate:
s = s[:maxsing, :maxsing]
v = v[:, :maxsing]
u = u[:, :maxsing] # depends on [control=['if'], data=[]]
else:
new_s = self.full_s.copy()
s = new_s
s.x[maxsing:, maxsing:] = 0.0
v.x[:, maxsing:] = 0.0
u.x[:, maxsing:] = 0.0
return (u, s, v)
|
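The truncation above is the standard thin-SVD preparation for a pseudo-inverse; a NumPy sketch of the same idea (not the `Matrix` class used here), assuming `get_maxsing` keeps singular values whose ratio to the largest exceeds `eigthresh`:

import numpy as np

def pseudo_inv_components_np(a, eigthresh=1e-5):
    # Thin SVD; numpy returns singular values sorted in descending order.
    u, s, vt = np.linalg.svd(a, full_matrices=False)
    # Count components whose ratio to the largest singular value exceeds the threshold.
    maxsing = int(np.sum(s / s[0] > eigthresh))
    return u[:, :maxsing], np.diag(s[:maxsing]), vt[:maxsing, :].T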
def _query(function,
consul_url,
token=None,
method='GET',
api_version='v1',
data=None,
query_params=None):
'''
    Consul object method to construct and execute a request on the API URL.
    :param api_url: The Consul api url.
    :param api_version: The Consul api version
:param function: The Consul api function to perform.
:param method: The HTTP method, e.g. GET or POST.
:param data: The data to be sent for POST method. This param is ignored for GET requests.
:return: The json response from the API call or False.
'''
if not query_params:
query_params = {}
ret = {'data': '',
'res': True}
if not token:
token = _get_token()
headers = {"X-Consul-Token": token, "Content-Type": "application/json"}
base_url = urllib.parse.urljoin(consul_url, '{0}/'.format(api_version))
url = urllib.parse.urljoin(base_url, function, False)
if method == 'GET':
data = None
else:
if data is None:
data = {}
data = salt.utils.json.dumps(data)
result = salt.utils.http.query(
url,
method=method,
params=query_params,
data=data,
decode=True,
status=True,
header_dict=headers,
opts=__opts__,
)
if result.get('status', None) == http_client.OK:
ret['data'] = result.get('dict', result)
ret['res'] = True
elif result.get('status', None) == http_client.NO_CONTENT:
ret['res'] = False
elif result.get('status', None) == http_client.NOT_FOUND:
ret['data'] = 'Key not found.'
ret['res'] = False
else:
if result:
ret['data'] = result
ret['res'] = True
else:
ret['res'] = False
return ret
|
def function[_query, parameter[function, consul_url, token, method, api_version, data, query_params]]:
constant[
    Consul object method to construct and execute a request on the API URL.
    :param api_url: The Consul api url.
    :param api_version: The Consul api version
:param function: The Consul api function to perform.
:param method: The HTTP method, e.g. GET or POST.
:param data: The data to be sent for POST method. This param is ignored for GET requests.
:return: The json response from the API call or False.
]
if <ast.UnaryOp object at 0x7da1b1c87c10> begin[:]
variable[query_params] assign[=] dictionary[[], []]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c87a90>, <ast.Constant object at 0x7da1b1c87a60>], [<ast.Constant object at 0x7da1b1c87a30>, <ast.Constant object at 0x7da1b1c87a00>]]
if <ast.UnaryOp object at 0x7da1b1c879a0> begin[:]
variable[token] assign[=] call[name[_get_token], parameter[]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c877f0>, <ast.Constant object at 0x7da1b1c877c0>], [<ast.Name object at 0x7da1b1c87790>, <ast.Constant object at 0x7da1b1c87760>]]
variable[base_url] assign[=] call[name[urllib].parse.urljoin, parameter[name[consul_url], call[constant[{0}/].format, parameter[name[api_version]]]]]
variable[url] assign[=] call[name[urllib].parse.urljoin, parameter[name[base_url], name[function], constant[False]]]
if compare[name[method] equal[==] constant[GET]] begin[:]
variable[data] assign[=] constant[None]
variable[result] assign[=] call[name[salt].utils.http.query, parameter[name[url]]]
if compare[call[name[result].get, parameter[constant[status], constant[None]]] equal[==] name[http_client].OK] begin[:]
call[name[ret]][constant[data]] assign[=] call[name[result].get, parameter[constant[dict], name[result]]]
call[name[ret]][constant[res]] assign[=] constant[True]
return[name[ret]]
|
keyword[def] identifier[_query] ( identifier[function] ,
identifier[consul_url] ,
identifier[token] = keyword[None] ,
identifier[method] = literal[string] ,
identifier[api_version] = literal[string] ,
identifier[data] = keyword[None] ,
identifier[query_params] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[query_params] :
identifier[query_params] ={}
identifier[ret] ={ literal[string] : literal[string] ,
literal[string] : keyword[True] }
keyword[if] keyword[not] identifier[token] :
identifier[token] = identifier[_get_token] ()
identifier[headers] ={ literal[string] : identifier[token] , literal[string] : literal[string] }
identifier[base_url] = identifier[urllib] . identifier[parse] . identifier[urljoin] ( identifier[consul_url] , literal[string] . identifier[format] ( identifier[api_version] ))
identifier[url] = identifier[urllib] . identifier[parse] . identifier[urljoin] ( identifier[base_url] , identifier[function] , keyword[False] )
keyword[if] identifier[method] == literal[string] :
identifier[data] = keyword[None]
keyword[else] :
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] ={}
identifier[data] = identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[data] )
identifier[result] = identifier[salt] . identifier[utils] . identifier[http] . identifier[query] (
identifier[url] ,
identifier[method] = identifier[method] ,
identifier[params] = identifier[query_params] ,
identifier[data] = identifier[data] ,
identifier[decode] = keyword[True] ,
identifier[status] = keyword[True] ,
identifier[header_dict] = identifier[headers] ,
identifier[opts] = identifier[__opts__] ,
)
keyword[if] identifier[result] . identifier[get] ( literal[string] , keyword[None] )== identifier[http_client] . identifier[OK] :
identifier[ret] [ literal[string] ]= identifier[result] . identifier[get] ( literal[string] , identifier[result] )
identifier[ret] [ literal[string] ]= keyword[True]
keyword[elif] identifier[result] . identifier[get] ( literal[string] , keyword[None] )== identifier[http_client] . identifier[NO_CONTENT] :
identifier[ret] [ literal[string] ]= keyword[False]
keyword[elif] identifier[result] . identifier[get] ( literal[string] , keyword[None] )== identifier[http_client] . identifier[NOT_FOUND] :
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= keyword[False]
keyword[else] :
keyword[if] identifier[result] :
identifier[ret] [ literal[string] ]= identifier[result]
identifier[ret] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
keyword[return] identifier[ret]
|
def _query(function, consul_url, token=None, method='GET', api_version='v1', data=None, query_params=None):
"""
    Consul object method to construct and execute a request on the API URL.
    :param api_url: The Consul api url.
    :param api_version: The Consul api version
:param function: The Consul api function to perform.
:param method: The HTTP method, e.g. GET or POST.
:param data: The data to be sent for POST method. This param is ignored for GET requests.
:return: The json response from the API call or False.
"""
if not query_params:
query_params = {} # depends on [control=['if'], data=[]]
ret = {'data': '', 'res': True}
if not token:
token = _get_token() # depends on [control=['if'], data=[]]
headers = {'X-Consul-Token': token, 'Content-Type': 'application/json'}
base_url = urllib.parse.urljoin(consul_url, '{0}/'.format(api_version))
url = urllib.parse.urljoin(base_url, function, False)
if method == 'GET':
data = None # depends on [control=['if'], data=[]]
else:
if data is None:
data = {} # depends on [control=['if'], data=['data']]
data = salt.utils.json.dumps(data)
result = salt.utils.http.query(url, method=method, params=query_params, data=data, decode=True, status=True, header_dict=headers, opts=__opts__)
if result.get('status', None) == http_client.OK:
ret['data'] = result.get('dict', result)
ret['res'] = True # depends on [control=['if'], data=[]]
elif result.get('status', None) == http_client.NO_CONTENT:
ret['res'] = False # depends on [control=['if'], data=[]]
elif result.get('status', None) == http_client.NOT_FOUND:
ret['data'] = 'Key not found.'
ret['res'] = False # depends on [control=['if'], data=[]]
elif result:
ret['data'] = result
ret['res'] = True # depends on [control=['if'], data=[]]
else:
ret['res'] = False
return ret
|
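A hedged example of driving `_query` for a Consul KV read; the endpoint path, URL, token, and datacenter below are illustrative placeholders, not values defined in this module:

# Illustrative call: GET v1/kv/web/config from a local Consul agent.
ret = _query(function='kv/web/config',
             consul_url='http://127.0.0.1:8500',
             token='anonymous',
             method='GET',
             query_params={'dc': 'dc1'})
if ret['res']:
    print(ret['data'])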
def get_standard_fwl_rules(self, firewall_id):
"""Get the rules of a standard firewall.
:param integer firewall_id: the instance ID of the standard firewall
:returns: A list of the rules.
"""
svc = self.client['Network_Component_Firewall']
return svc.getRules(id=firewall_id, mask=RULE_MASK)
|
def function[get_standard_fwl_rules, parameter[self, firewall_id]]:
constant[Get the rules of a standard firewall.
:param integer firewall_id: the instance ID of the standard firewall
:returns: A list of the rules.
]
variable[svc] assign[=] call[name[self].client][constant[Network_Component_Firewall]]
return[call[name[svc].getRules, parameter[]]]
|
keyword[def] identifier[get_standard_fwl_rules] ( identifier[self] , identifier[firewall_id] ):
literal[string]
identifier[svc] = identifier[self] . identifier[client] [ literal[string] ]
keyword[return] identifier[svc] . identifier[getRules] ( identifier[id] = identifier[firewall_id] , identifier[mask] = identifier[RULE_MASK] )
|
def get_standard_fwl_rules(self, firewall_id):
"""Get the rules of a standard firewall.
:param integer firewall_id: the instance ID of the standard firewall
:returns: A list of the rules.
"""
svc = self.client['Network_Component_Firewall']
return svc.getRules(id=firewall_id, mask=RULE_MASK)
|
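Usage reduces to a single SoftLayer API call against the `Network_Component_Firewall` service; `mgr` below is a hypothetical instance of the manager class that defines this method:

# firewall_id identifies the Network_Component_Firewall record to inspect.
rules = mgr.get_standard_fwl_rules(firewall_id=1234)
for rule in rules:
    print(rule)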
def fetch(args):
"""
%prog fetch "query"
OR
%prog fetch queries.txt
Please provide a UniProt compatible `query` to retrieve data. If `query` contains
spaces, please remember to "quote" it.
You can also specify a `filename` which contains queries, one per line.
Follow this syntax <http://www.uniprot.org/help/text-search#text-search-syntax>
to query any of the documented fields <http://www.uniprot.org/help/query-fields>
"""
import re
import csv
p = OptionParser(fetch.__doc__)
p.add_option("--format", default="tab", choices=valid_formats,
help="download format [default: %default]")
p.add_option("--columns", default="entry name, protein names, genes,organism",
help="columns to download, if --format is `tab` or `xls`." +
" [default: %default]")
p.add_option("--include", default=False, action="store_true",
help="Include isoforms when --format is `fasta` or include `description` when" +
" --format is `rdf`. [default: %default]")
p.add_option("--limit", default=10, type="int",
help="Max number of results to retrieve [default: %default]")
p.add_option("--offset", default=0, type="int",
help="Offset of first result, used with --limit [default: %default]")
p.add_option("--skipcheck", default=False, action="store_true",
help="turn off prompt to check file existence [default: %default]")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
query, = args
url_params = {}
if op.exists(query):
pf = query.rsplit(".", 1)[0]
list_of_queries = [row.strip() for row in open(query)]
else:
# the query is the search term
pf = query.strip().strip('\"')
list_of_queries = [pf]
pf = re.sub(r"\s+", '_', pf)
    assert len(list_of_queries) > 0, \
            "Please provide at least one input query"
url_params['format'] = opts.format
if opts.columns and opts.format in valid_column_formats:
reader = csv.reader([opts.columns], skipinitialspace=True)
cols = [col for r in reader for col in r]
for col in cols:
        assert col in valid_columns, \
            "Column '{0}' is not valid. Allowed options are {1}".\
                format(col, valid_columns)
url_params['columns'] = ",".join(cols)
if opts.include and opts.format in valid_include_formats:
url_params['include'] = "yes"
url_params['limit'] = opts.limit
url_params['offset'] = opts.offset
outfile = "{0}.{1}".format(pf, opts.format)
# If noprompt, will not check file existence
fw = must_open(outfile, "w", checkexists=True,
skipcheck=opts.skipcheck)
if fw is None:
return
seen = set()
for query in list_of_queries:
if query in seen:
logging.error("Duplicate query ({0}) found".format(query))
continue
url_params['query'] = query
data = urlencode(url_params)
try:
request = Request(uniprot_url, data)
response = urlopen(request)
except (HTTPError, URLError,
RuntimeError, KeyError) as e:
logging.error(e)
logging.debug("wait 5 seconds to reconnect...")
            time.sleep(5)
            continue  # without this, `response` below would be unbound after a failed request
page = response.read()
if not page:
logging.error("query `{0}` yielded no results".format(query))
continue
print(page, file=fw)
seen.add(query)
if seen:
print("A total of {0} out of {1} queries returned results.".
format(len(seen), len(list_of_queries)), file=sys.stderr)
|
def function[fetch, parameter[args]]:
constant[
%prog fetch "query"
OR
%prog fetch queries.txt
Please provide a UniProt compatible `query` to retrieve data. If `query` contains
spaces, please remember to "quote" it.
You can also specify a `filename` which contains queries, one per line.
Follow this syntax <http://www.uniprot.org/help/text-search#text-search-syntax>
to query any of the documented fields <http://www.uniprot.org/help/query-fields>
]
import module[re]
import module[csv]
variable[p] assign[=] call[name[OptionParser], parameter[name[fetch].__doc__]]
call[name[p].add_option, parameter[constant[--format]]]
call[name[p].add_option, parameter[constant[--columns]]]
call[name[p].add_option, parameter[constant[--include]]]
call[name[p].add_option, parameter[constant[--limit]]]
call[name[p].add_option, parameter[constant[--offset]]]
call[name[p].add_option, parameter[constant[--skipcheck]]]
<ast.Tuple object at 0x7da1b076ad40> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b0769630>]]
<ast.Tuple object at 0x7da1b076bdc0> assign[=] name[args]
variable[url_params] assign[=] dictionary[[], []]
if call[name[op].exists, parameter[name[query]]] begin[:]
variable[pf] assign[=] call[call[name[query].rsplit, parameter[constant[.], constant[1]]]][constant[0]]
variable[list_of_queries] assign[=] <ast.ListComp object at 0x7da1b0768280>
assert[compare[call[name[len], parameter[name[list_of_queries]]] greater[>] constant[0]]]
call[name[url_params]][constant[format]] assign[=] name[opts].format
if <ast.BoolOp object at 0x7da1b08aa200> begin[:]
variable[reader] assign[=] call[name[csv].reader, parameter[list[[<ast.Attribute object at 0x7da1b08a9c60>]]]]
variable[cols] assign[=] <ast.ListComp object at 0x7da1b08a8eb0>
for taget[name[col]] in starred[name[cols]] begin[:]
assert[compare[name[col] in name[valid_columns]]]
call[name[url_params]][constant[columns]] assign[=] call[constant[,].join, parameter[name[cols]]]
if <ast.BoolOp object at 0x7da1b08a9870> begin[:]
call[name[url_params]][constant[include]] assign[=] constant[yes]
call[name[url_params]][constant[limit]] assign[=] name[opts].limit
call[name[url_params]][constant[offset]] assign[=] name[opts].offset
variable[outfile] assign[=] call[constant[{0}.{1}].format, parameter[name[pf], name[opts].format]]
variable[fw] assign[=] call[name[must_open], parameter[name[outfile], constant[w]]]
if compare[name[fw] is constant[None]] begin[:]
return[None]
variable[seen] assign[=] call[name[set], parameter[]]
for taget[name[query]] in starred[name[list_of_queries]] begin[:]
if compare[name[query] in name[seen]] begin[:]
call[name[logging].error, parameter[call[constant[Duplicate query ({0}) found].format, parameter[name[query]]]]]
continue
call[name[url_params]][constant[query]] assign[=] name[query]
variable[data] assign[=] call[name[urlencode], parameter[name[url_params]]]
<ast.Try object at 0x7da1b08a8910>
variable[page] assign[=] call[name[response].read, parameter[]]
if <ast.UnaryOp object at 0x7da1b08a9390> begin[:]
call[name[logging].error, parameter[call[constant[query `{0}` yielded no results].format, parameter[name[query]]]]]
continue
call[name[print], parameter[name[page]]]
call[name[seen].add, parameter[name[query]]]
if name[seen] begin[:]
call[name[print], parameter[call[constant[A total of {0} out of {1} queries returned results.].format, parameter[call[name[len], parameter[name[seen]]], call[name[len], parameter[name[list_of_queries]]]]]]]
|
keyword[def] identifier[fetch] ( identifier[args] ):
literal[string]
keyword[import] identifier[re]
keyword[import] identifier[csv]
identifier[p] = identifier[OptionParser] ( identifier[fetch] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] , identifier[choices] = identifier[valid_formats] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] ,
identifier[help] = literal[string] +
literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] +
literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[int] , identifier[type] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[int] , identifier[type] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[query] ,= identifier[args]
identifier[url_params] ={}
keyword[if] identifier[op] . identifier[exists] ( identifier[query] ):
identifier[pf] = identifier[query] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
identifier[list_of_queries] =[ identifier[row] . identifier[strip] () keyword[for] identifier[row] keyword[in] identifier[open] ( identifier[query] )]
keyword[else] :
identifier[pf] = identifier[query] . identifier[strip] (). identifier[strip] ( literal[string] )
identifier[list_of_queries] =[ identifier[pf] ]
identifier[pf] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[pf] )
keyword[assert] identifier[len] ( identifier[list_of_queries] )> literal[int] , literal[string]
identifier[url_params] [ literal[string] ]= identifier[opts] . identifier[format]
keyword[if] identifier[opts] . identifier[columns] keyword[and] identifier[opts] . identifier[format] keyword[in] identifier[valid_column_formats] :
identifier[reader] = identifier[csv] . identifier[reader] ([ identifier[opts] . identifier[columns] ], identifier[skipinitialspace] = keyword[True] )
identifier[cols] =[ identifier[col] keyword[for] identifier[r] keyword[in] identifier[reader] keyword[for] identifier[col] keyword[in] identifier[r] ]
keyword[for] identifier[col] keyword[in] identifier[cols] :
keyword[assert] identifier[col] keyword[in] identifier[valid_columns] , literal[string] . identifier[format] ( identifier[col] , identifier[valid_columns] )
identifier[url_params] [ literal[string] ]= literal[string] . identifier[join] ( identifier[cols] )
keyword[if] identifier[opts] . identifier[include] keyword[and] identifier[opts] . identifier[format] keyword[in] identifier[valid_include_formats] :
identifier[url_params] [ literal[string] ]= literal[string]
identifier[url_params] [ literal[string] ]= identifier[opts] . identifier[limit]
identifier[url_params] [ literal[string] ]= identifier[opts] . identifier[offset]
identifier[outfile] = literal[string] . identifier[format] ( identifier[pf] , identifier[opts] . identifier[format] )
identifier[fw] = identifier[must_open] ( identifier[outfile] , literal[string] , identifier[checkexists] = keyword[True] ,
identifier[skipcheck] = identifier[opts] . identifier[skipcheck] )
keyword[if] identifier[fw] keyword[is] keyword[None] :
keyword[return]
identifier[seen] = identifier[set] ()
keyword[for] identifier[query] keyword[in] identifier[list_of_queries] :
keyword[if] identifier[query] keyword[in] identifier[seen] :
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[query] ))
keyword[continue]
identifier[url_params] [ literal[string] ]= identifier[query]
identifier[data] = identifier[urlencode] ( identifier[url_params] )
keyword[try] :
identifier[request] = identifier[Request] ( identifier[uniprot_url] , identifier[data] )
identifier[response] = identifier[urlopen] ( identifier[request] )
keyword[except] ( identifier[HTTPError] , identifier[URLError] ,
identifier[RuntimeError] , identifier[KeyError] ) keyword[as] identifier[e] :
identifier[logging] . identifier[error] ( identifier[e] )
identifier[logging] . identifier[debug] ( literal[string] )
            identifier[time] . identifier[sleep] ( literal[int] )
            keyword[continue]
identifier[page] = identifier[response] . identifier[read] ()
keyword[if] keyword[not] identifier[page] :
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[query] ))
keyword[continue]
identifier[print] ( identifier[page] , identifier[file] = identifier[fw] )
identifier[seen] . identifier[add] ( identifier[query] )
keyword[if] identifier[seen] :
identifier[print] ( literal[string] .
identifier[format] ( identifier[len] ( identifier[seen] ), identifier[len] ( identifier[list_of_queries] )), identifier[file] = identifier[sys] . identifier[stderr] )
|
def fetch(args):
"""
%prog fetch "query"
OR
%prog fetch queries.txt
Please provide a UniProt compatible `query` to retrieve data. If `query` contains
spaces, please remember to "quote" it.
You can also specify a `filename` which contains queries, one per line.
Follow this syntax <http://www.uniprot.org/help/text-search#text-search-syntax>
to query any of the documented fields <http://www.uniprot.org/help/query-fields>
"""
import re
import csv
p = OptionParser(fetch.__doc__)
p.add_option('--format', default='tab', choices=valid_formats, help='download format [default: %default]')
p.add_option('--columns', default='entry name, protein names, genes,organism', help='columns to download, if --format is `tab` or `xls`.' + ' [default: %default]')
p.add_option('--include', default=False, action='store_true', help='Include isoforms when --format is `fasta` or include `description` when' + ' --format is `rdf`. [default: %default]')
p.add_option('--limit', default=10, type='int', help='Max number of results to retrieve [default: %default]')
p.add_option('--offset', default=0, type='int', help='Offset of first result, used with --limit [default: %default]')
p.add_option('--skipcheck', default=False, action='store_true', help='turn off prompt to check file existence [default: %default]')
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(query,) = args
url_params = {}
if op.exists(query):
pf = query.rsplit('.', 1)[0]
list_of_queries = [row.strip() for row in open(query)] # depends on [control=['if'], data=[]]
else:
# the query is the search term
pf = query.strip().strip('"')
list_of_queries = [pf]
pf = re.sub('\\s+', '_', pf)
    assert len(list_of_queries) > 0, 'Please provide at least one input query'
url_params['format'] = opts.format
if opts.columns and opts.format in valid_column_formats:
reader = csv.reader([opts.columns], skipinitialspace=True)
cols = [col for r in reader for col in r]
for col in cols:
        assert col in valid_columns, "Column '{0}' is not valid. Allowed options are {1}".format(col, valid_columns) # depends on [control=['for'], data=['col']]
url_params['columns'] = ','.join(cols) # depends on [control=['if'], data=[]]
if opts.include and opts.format in valid_include_formats:
url_params['include'] = 'yes' # depends on [control=['if'], data=[]]
url_params['limit'] = opts.limit
url_params['offset'] = opts.offset
outfile = '{0}.{1}'.format(pf, opts.format)
# If noprompt, will not check file existence
fw = must_open(outfile, 'w', checkexists=True, skipcheck=opts.skipcheck)
if fw is None:
return # depends on [control=['if'], data=[]]
seen = set()
for query in list_of_queries:
if query in seen:
logging.error('Duplicate query ({0}) found'.format(query))
continue # depends on [control=['if'], data=['query']]
url_params['query'] = query
data = urlencode(url_params)
try:
request = Request(uniprot_url, data)
response = urlopen(request) # depends on [control=['try'], data=[]]
except (HTTPError, URLError, RuntimeError, KeyError) as e:
logging.error(e)
logging.debug('wait 5 seconds to reconnect...')
            time.sleep(5)
            continue  # avoid NameError on `response` after a failed request # depends on [control=['except'], data=['e']]
page = response.read()
if not page:
logging.error('query `{0}` yielded no results'.format(query))
continue # depends on [control=['if'], data=[]]
print(page, file=fw)
seen.add(query) # depends on [control=['for'], data=['query']]
if seen:
print('A total of {0} out of {1} queries returned results.'.format(len(seen), len(list_of_queries)), file=sys.stderr) # depends on [control=['if'], data=[]]
|
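`fetch` parses its argument list with OptionParser, so it can be driven programmatically as well as from the CLI; the query string below is illustrative, and the second call assumes `fasta` is among `valid_formats`:

# Equivalent of: %prog fetch "organism:9606 AND reviewed:yes" --format tab --limit 5
fetch(['organism:9606 AND reviewed:yes', '--format', 'tab', '--limit', '5'])

# Or read queries from a file, one per line:
fetch(['queries.txt', '--format', 'fasta', '--include'])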
def mylogger(name=None, filename=None, indent_offset=7, level=_logging.DEBUG, stream_level=_logging.WARN, file_level=_logging.INFO):
"""
    Sets up logging to *filename*_debug.log, *filename*.log, and the terminal. *indent_offset* attempts to line up the lowest indent level to 0. Custom levels:
* *level*: Parent logging level.
* *stream_level*: Logging level for console stream.
* *file_level*: Logging level for general file log.
"""
if name is not None:
logger = _logging.getLogger(name)
else:
logger = _logging.getLogger()
logger.setLevel(level)
fmtr = IndentFormatter(indent_offset=indent_offset)
fmtr_msgonly = IndentFormatter('%(funcName)s:%(lineno)d: %(message)s')
ch = _logging.StreamHandler()
ch.setLevel(stream_level)
ch.setFormatter(fmtr_msgonly)
logger.addHandler(ch)
if filename is not None:
debugh = _logging.FileHandler(filename='{}_debug.log'.format(filename), mode='w')
debugh.setLevel(_logging.DEBUG)
debugh.setFormatter(fmtr_msgonly)
logger.addHandler(debugh)
fh = _logging.FileHandler(filename='{}.log'.format(filename), mode='w')
fh.setLevel(file_level)
fh.setFormatter(fmtr)
logger.addHandler(fh)
return logger
|
def function[mylogger, parameter[name, filename, indent_offset, level, stream_level, file_level]]:
constant[
    Sets up logging to *filename*_debug.log, *filename*.log, and the terminal. *indent_offset* attempts to line up the lowest indent level to 0. Custom levels:
* *level*: Parent logging level.
* *stream_level*: Logging level for console stream.
* *file_level*: Logging level for general file log.
]
if compare[name[name] is_not constant[None]] begin[:]
variable[logger] assign[=] call[name[_logging].getLogger, parameter[name[name]]]
call[name[logger].setLevel, parameter[name[level]]]
variable[fmtr] assign[=] call[name[IndentFormatter], parameter[]]
variable[fmtr_msgonly] assign[=] call[name[IndentFormatter], parameter[constant[%(funcName)s:%(lineno)d: %(message)s]]]
variable[ch] assign[=] call[name[_logging].StreamHandler, parameter[]]
call[name[ch].setLevel, parameter[name[stream_level]]]
call[name[ch].setFormatter, parameter[name[fmtr_msgonly]]]
call[name[logger].addHandler, parameter[name[ch]]]
if compare[name[filename] is_not constant[None]] begin[:]
variable[debugh] assign[=] call[name[_logging].FileHandler, parameter[]]
call[name[debugh].setLevel, parameter[name[_logging].DEBUG]]
call[name[debugh].setFormatter, parameter[name[fmtr_msgonly]]]
call[name[logger].addHandler, parameter[name[debugh]]]
variable[fh] assign[=] call[name[_logging].FileHandler, parameter[]]
call[name[fh].setLevel, parameter[name[file_level]]]
call[name[fh].setFormatter, parameter[name[fmtr]]]
call[name[logger].addHandler, parameter[name[fh]]]
return[name[logger]]
|
keyword[def] identifier[mylogger] ( identifier[name] = keyword[None] , identifier[filename] = keyword[None] , identifier[indent_offset] = literal[int] , identifier[level] = identifier[_logging] . identifier[DEBUG] , identifier[stream_level] = identifier[_logging] . identifier[WARN] , identifier[file_level] = identifier[_logging] . identifier[INFO] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[logger] = identifier[_logging] . identifier[getLogger] ( identifier[name] )
keyword[else] :
identifier[logger] = identifier[_logging] . identifier[getLogger] ()
identifier[logger] . identifier[setLevel] ( identifier[level] )
identifier[fmtr] = identifier[IndentFormatter] ( identifier[indent_offset] = identifier[indent_offset] )
identifier[fmtr_msgonly] = identifier[IndentFormatter] ( literal[string] )
identifier[ch] = identifier[_logging] . identifier[StreamHandler] ()
identifier[ch] . identifier[setLevel] ( identifier[stream_level] )
identifier[ch] . identifier[setFormatter] ( identifier[fmtr_msgonly] )
identifier[logger] . identifier[addHandler] ( identifier[ch] )
keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] :
identifier[debugh] = identifier[_logging] . identifier[FileHandler] ( identifier[filename] = literal[string] . identifier[format] ( identifier[filename] ), identifier[mode] = literal[string] )
identifier[debugh] . identifier[setLevel] ( identifier[_logging] . identifier[DEBUG] )
identifier[debugh] . identifier[setFormatter] ( identifier[fmtr_msgonly] )
identifier[logger] . identifier[addHandler] ( identifier[debugh] )
identifier[fh] = identifier[_logging] . identifier[FileHandler] ( identifier[filename] = literal[string] . identifier[format] ( identifier[filename] ), identifier[mode] = literal[string] )
identifier[fh] . identifier[setLevel] ( identifier[file_level] )
identifier[fh] . identifier[setFormatter] ( identifier[fmtr] )
identifier[logger] . identifier[addHandler] ( identifier[fh] )
keyword[return] identifier[logger]
|
def mylogger(name=None, filename=None, indent_offset=7, level=_logging.DEBUG, stream_level=_logging.WARN, file_level=_logging.INFO):
"""
    Sets up logging to *filename*_debug.log, *filename*.log, and the terminal. *indent_offset* attempts to line up the lowest indent level to 0. Custom levels:
* *level*: Parent logging level.
* *stream_level*: Logging level for console stream.
* *file_level*: Logging level for general file log.
"""
if name is not None:
logger = _logging.getLogger(name) # depends on [control=['if'], data=['name']]
else:
logger = _logging.getLogger()
logger.setLevel(level)
fmtr = IndentFormatter(indent_offset=indent_offset)
fmtr_msgonly = IndentFormatter('%(funcName)s:%(lineno)d: %(message)s')
ch = _logging.StreamHandler()
ch.setLevel(stream_level)
ch.setFormatter(fmtr_msgonly)
logger.addHandler(ch)
if filename is not None:
debugh = _logging.FileHandler(filename='{}_debug.log'.format(filename), mode='w')
debugh.setLevel(_logging.DEBUG)
debugh.setFormatter(fmtr_msgonly)
logger.addHandler(debugh)
fh = _logging.FileHandler(filename='{}.log'.format(filename), mode='w')
fh.setLevel(file_level)
fh.setFormatter(fmtr)
logger.addHandler(fh) # depends on [control=['if'], data=['filename']]
return logger
|
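A short usage sketch; with the defaults, DEBUG records go only to the debug file, INFO to both files, and WARNING and above also to the console:

log = mylogger(name='myapp', filename='run')
log.debug('written to run_debug.log only')
log.info('written to run.log and run_debug.log')
log.warning('also echoed to the console')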
def post_build(self, pkt, pay):
"""
Apply the previous methods according to the writing cipher type.
"""
# Compute the length of TLSPlaintext fragment
hdr, frag = pkt[:5], pkt[5:]
if not isinstance(self.tls_session.rcs.cipher, Cipher_NULL):
frag = self._tls_auth_encrypt(frag)
if self.len is not None:
# The user gave us a 'len', let's respect this ultimately
hdr = hdr[:3] + struct.pack("!H", self.len)
else:
# Update header with the length of TLSCiphertext.inner
hdr = hdr[:3] + struct.pack("!H", len(frag))
# Now we commit the pending write state if it has been triggered. We
# update nothing if the pwcs was not set. This probably means that
# we're working out-of-context (and we need to keep the default wcs).
if self.tls_session.triggered_pwcs_commit:
if self.tls_session.pwcs is not None:
self.tls_session.wcs = self.tls_session.pwcs
self.tls_session.pwcs = None
self.tls_session.triggered_pwcs_commit = False
return hdr + frag + pay
|
def function[post_build, parameter[self, pkt, pay]]:
constant[
Apply the previous methods according to the writing cipher type.
]
<ast.Tuple object at 0x7da1b21bb940> assign[=] tuple[[<ast.Subscript object at 0x7da1b21b9d50>, <ast.Subscript object at 0x7da1b21bacb0>]]
if <ast.UnaryOp object at 0x7da1b21b8fa0> begin[:]
variable[frag] assign[=] call[name[self]._tls_auth_encrypt, parameter[name[frag]]]
if compare[name[self].len is_not constant[None]] begin[:]
variable[hdr] assign[=] binary_operation[call[name[hdr]][<ast.Slice object at 0x7da1b21b8790>] + call[name[struct].pack, parameter[constant[!H], name[self].len]]]
if name[self].tls_session.triggered_pwcs_commit begin[:]
if compare[name[self].tls_session.pwcs is_not constant[None]] begin[:]
name[self].tls_session.wcs assign[=] name[self].tls_session.pwcs
name[self].tls_session.pwcs assign[=] constant[None]
name[self].tls_session.triggered_pwcs_commit assign[=] constant[False]
return[binary_operation[binary_operation[name[hdr] + name[frag]] + name[pay]]]
|
keyword[def] identifier[post_build] ( identifier[self] , identifier[pkt] , identifier[pay] ):
literal[string]
identifier[hdr] , identifier[frag] = identifier[pkt] [: literal[int] ], identifier[pkt] [ literal[int] :]
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[tls_session] . identifier[rcs] . identifier[cipher] , identifier[Cipher_NULL] ):
identifier[frag] = identifier[self] . identifier[_tls_auth_encrypt] ( identifier[frag] )
keyword[if] identifier[self] . identifier[len] keyword[is] keyword[not] keyword[None] :
identifier[hdr] = identifier[hdr] [: literal[int] ]+ identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[len] )
keyword[else] :
identifier[hdr] = identifier[hdr] [: literal[int] ]+ identifier[struct] . identifier[pack] ( literal[string] , identifier[len] ( identifier[frag] ))
keyword[if] identifier[self] . identifier[tls_session] . identifier[triggered_pwcs_commit] :
keyword[if] identifier[self] . identifier[tls_session] . identifier[pwcs] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[tls_session] . identifier[wcs] = identifier[self] . identifier[tls_session] . identifier[pwcs]
identifier[self] . identifier[tls_session] . identifier[pwcs] = keyword[None]
identifier[self] . identifier[tls_session] . identifier[triggered_pwcs_commit] = keyword[False]
keyword[return] identifier[hdr] + identifier[frag] + identifier[pay]
|
def post_build(self, pkt, pay):
"""
Apply the previous methods according to the writing cipher type.
"""
# Compute the length of TLSPlaintext fragment
(hdr, frag) = (pkt[:5], pkt[5:])
if not isinstance(self.tls_session.rcs.cipher, Cipher_NULL):
frag = self._tls_auth_encrypt(frag) # depends on [control=['if'], data=[]]
if self.len is not None:
# The user gave us a 'len', let's respect this ultimately
hdr = hdr[:3] + struct.pack('!H', self.len) # depends on [control=['if'], data=[]]
else:
# Update header with the length of TLSCiphertext.inner
hdr = hdr[:3] + struct.pack('!H', len(frag))
# Now we commit the pending write state if it has been triggered. We
# update nothing if the pwcs was not set. This probably means that
# we're working out-of-context (and we need to keep the default wcs).
if self.tls_session.triggered_pwcs_commit:
if self.tls_session.pwcs is not None:
self.tls_session.wcs = self.tls_session.pwcs
self.tls_session.pwcs = None # depends on [control=['if'], data=[]]
self.tls_session.triggered_pwcs_commit = False # depends on [control=['if'], data=[]]
return hdr + frag + pay
|
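The header rewrite keeps the 5-byte TLS record header (1-byte content type, 2-byte version, 2-byte length) and splices in a big-endian 16-bit length; the same packing in isolation:

import struct

hdr = b'\x17\x03\x03\x00\x00'        # application_data, TLS 1.2, length placeholder
frag = b'\x01' * 42                  # encrypted fragment (illustrative)
hdr = hdr[:3] + struct.pack('!H', len(frag))
assert hdr == b'\x17\x03\x03\x00\x2a'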
def compress(func):
    """Compress the result with the deflate algorithm if the client asks for it."""
    def wrapper(*args, **kwargs):
        """Wrapper that takes one function and returns the compressed result."""
ret = func(*args, **kwargs)
logger.debug('Receive {} {} request with header: {}'.format(
request.method,
request.url,
['{}: {}'.format(h, request.headers.get(h)) for h in request.headers.keys()]
))
if 'deflate' in request.headers.get('Accept-Encoding', ''):
response.headers['Content-Encoding'] = 'deflate'
ret = deflate_compress(ret)
else:
response.headers['Content-Encoding'] = 'identity'
return ret
def deflate_compress(data, compress_level=6):
"""Compress given data using the DEFLATE algorithm"""
# Init compression
zobj = zlib.compressobj(compress_level,
zlib.DEFLATED,
zlib.MAX_WBITS,
zlib.DEF_MEM_LEVEL,
zlib.Z_DEFAULT_STRATEGY)
# Return compressed object
return zobj.compress(b(data)) + zobj.flush()
return wrapper
|
def function[compress, parameter[func]]:
    constant[Compress result with deflate algorithm if the client asks for it.]
def function[wrapper, parameter[]]:
        constant[Wrapper that takes one function and returns the compressed result.]
variable[ret] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da18fe90f40>]]
call[name[logger].debug, parameter[call[constant[Receive {} {} request with header: {}].format, parameter[name[request].method, name[request].url, <ast.ListComp object at 0x7da18fe927d0>]]]]
if compare[constant[deflate] in call[name[request].headers.get, parameter[constant[Accept-Encoding], constant[]]]] begin[:]
call[name[response].headers][constant[Content-Encoding]] assign[=] constant[deflate]
variable[ret] assign[=] call[name[deflate_compress], parameter[name[ret]]]
return[name[ret]]
def function[deflate_compress, parameter[data, compress_level]]:
constant[Compress given data using the DEFLATE algorithm]
variable[zobj] assign[=] call[name[zlib].compressobj, parameter[name[compress_level], name[zlib].DEFLATED, name[zlib].MAX_WBITS, name[zlib].DEF_MEM_LEVEL, name[zlib].Z_DEFAULT_STRATEGY]]
return[binary_operation[call[name[zobj].compress, parameter[call[name[b], parameter[name[data]]]]] + call[name[zobj].flush, parameter[]]]]
return[name[wrapper]]
|
keyword[def] identifier[compress] ( identifier[func] ):
literal[string]
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[ret] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[request] . identifier[method] ,
identifier[request] . identifier[url] ,
[ literal[string] . identifier[format] ( identifier[h] , identifier[request] . identifier[headers] . identifier[get] ( identifier[h] )) keyword[for] identifier[h] keyword[in] identifier[request] . identifier[headers] . identifier[keys] ()]
))
keyword[if] literal[string] keyword[in] identifier[request] . identifier[headers] . identifier[get] ( literal[string] , literal[string] ):
identifier[response] . identifier[headers] [ literal[string] ]= literal[string]
identifier[ret] = identifier[deflate_compress] ( identifier[ret] )
keyword[else] :
identifier[response] . identifier[headers] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[def] identifier[deflate_compress] ( identifier[data] , identifier[compress_level] = literal[int] ):
literal[string]
identifier[zobj] = identifier[zlib] . identifier[compressobj] ( identifier[compress_level] ,
identifier[zlib] . identifier[DEFLATED] ,
identifier[zlib] . identifier[MAX_WBITS] ,
identifier[zlib] . identifier[DEF_MEM_LEVEL] ,
identifier[zlib] . identifier[Z_DEFAULT_STRATEGY] )
keyword[return] identifier[zobj] . identifier[compress] ( identifier[b] ( identifier[data] ))+ identifier[zobj] . identifier[flush] ()
keyword[return] identifier[wrapper]
|
def compress(func):
"""Compress result with deflate algorithm if the client ask for it."""
def wrapper(*args, **kwargs):
"""Wrapper that take one function and return the compressed result."""
ret = func(*args, **kwargs)
logger.debug('Receive {} {} request with header: {}'.format(request.method, request.url, ['{}: {}'.format(h, request.headers.get(h)) for h in request.headers.keys()]))
if 'deflate' in request.headers.get('Accept-Encoding', ''):
response.headers['Content-Encoding'] = 'deflate'
ret = deflate_compress(ret) # depends on [control=['if'], data=[]]
else:
response.headers['Content-Encoding'] = 'identity'
return ret
def deflate_compress(data, compress_level=6):
"""Compress given data using the DEFLATE algorithm"""
# Init compression
zobj = zlib.compressobj(compress_level, zlib.DEFLATED, zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, zlib.Z_DEFAULT_STRATEGY)
# Return compressed object
return zobj.compress(b(data)) + zobj.flush()
return wrapper
|
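A hedged usage sketch for the compress decorator: the request/response globals it touches look like Bottle's, so assuming a Bottle app and the decorator above in scope, a route is wrapped as below (path and payload are placeholders).
from bottle import route, run

@route('/payload')
@compress
def payload():
    # Deflate-compressed only when the client sent Accept-Encoding: deflate;
    # otherwise delivered as-is with Content-Encoding: identity.
    return 'x' * 10000

run(host='localhost', port=8080)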
def shutdown_kernel(self, kernel_id):
"""Shutdown a kernel by its kernel uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to shutdown.
"""
self.get_kernel(kernel_id).shutdown_kernel()
del self._kernels[kernel_id]
|
def function[shutdown_kernel, parameter[self, kernel_id]]:
constant[Shutdown a kernel by its kernel uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to shutdown.
]
call[call[name[self].get_kernel, parameter[name[kernel_id]]].shutdown_kernel, parameter[]]
<ast.Delete object at 0x7da18bc72e00>
|
keyword[def] identifier[shutdown_kernel] ( identifier[self] , identifier[kernel_id] ):
literal[string]
identifier[self] . identifier[get_kernel] ( identifier[kernel_id] ). identifier[shutdown_kernel] ()
keyword[del] identifier[self] . identifier[_kernels] [ identifier[kernel_id] ]
|
def shutdown_kernel(self, kernel_id):
"""Shutdown a kernel by its kernel uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to shutdown.
"""
self.get_kernel(kernel_id).shutdown_kernel()
del self._kernels[kernel_id]
|
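For context, a small self-contained sketch of the registry contract shutdown_kernel relies on: _kernels maps uuids to kernel objects exposing shutdown_kernel(). Everything named Fake* here is hypothetical.
class FakeKernel:
    def shutdown_kernel(self):
        print('kernel stopped')

class FakeManager:
    def __init__(self):
        self._kernels = {'abc-123': FakeKernel()}
    def get_kernel(self, kernel_id):
        return self._kernels[kernel_id]
    def shutdown_kernel(self, kernel_id):
        # Same two steps as the method above: stop, then drop the entry.
        self.get_kernel(kernel_id).shutdown_kernel()
        del self._kernels[kernel_id]

mgr = FakeManager()
mgr.shutdown_kernel('abc-123')          # prints 'kernel stopped'
assert 'abc-123' not in mgr._kernels    # a second call would raise KeyError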
def calc_q0_perc_uz_v1(self):
"""Perform the upper zone layer routine which determines percolation
to the lower zone layer and the fast response of the hland model.
Note that the system behaviour of this method depends strongly on the
specifications of the options |RespArea| and |RecStep|.
Required control parameters:
|RecStep|
|PercMax|
|K|
|Alpha|
Required derived parameters:
|DT|
Required fluxes sequence:
|InUZ|
Calculated fluxes sequences:
|Perc|
|Q0|
Updated state sequence:
|UZ|
Basic equations:
:math:`\\frac{dUZ}{dt} = InUZ - Perc - Q0` \n
:math:`Perc = PercMax \\cdot ContriArea` \n
    :math:`Q0 = K \\cdot \\left( \\frac{UZ}{ContriArea} \\right)^{1+Alpha}`
Examples:
The upper zone layer routine is an exception compared to
the other routines of the HydPy-H-Land model, regarding its
consideration of numerical accuracy. To increase the accuracy of
the numerical integration of the underlying ordinary differential
equation, each simulation step can be divided into substeps, which
are all solved with first order accuracy. In the first example,
this option is omitted through setting the RecStep parameter to one:
>>> from hydpy.models.hland import *
>>> parameterstep('1d')
>>> simulationstep('12h')
>>> recstep(2)
>>> derived.dt = 1/recstep
>>> percmax(2.0)
>>> alpha(1.0)
>>> k(2.0)
>>> fluxes.contriarea = 1.0
>>> fluxes.inuz = 0.0
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(1.0)
>>> fluxes.q0
q0(0.0)
>>> states.uz
uz(0.0)
Due to the sequential calculation of the upper zone routine, the
upper zone storage is drained completely through percolation and
no water is left for fast discharge response. By dividing the
        simulation step into 100 substeps, the results are quite different:
>>> recstep(200)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.786934)
>>> fluxes.q0
q0(0.213066)
>>> states.uz
uz(0.0)
Note that the assumed length of the simulation step is only a
half day. Hence the effective values of the maximum percolation
        rate and the storage coefficient are not 2 but 1:
>>> percmax
percmax(2.0)
>>> k
k(2.0)
>>> percmax.value
1.0
>>> k.value
1.0
By decreasing the contributing area one decreases percolation but
increases fast discharge response:
>>> fluxes.contriarea = 0.5
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.434108)
>>> fluxes.q0
q0(0.565892)
>>> states.uz
uz(0.0)
Resetting RecStep leads to more transparent results. Note that, due
to the large value of the storage coefficient and the low accuracy
of the numerical approximation, direct discharge drains the rest of
the upper zone storage:
>>> recstep(2)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.5)
>>> states.uz
uz(0.0)
Applying a more reasonable storage coefficient results in:
>>> k(0.5)
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.25)
>>> states.uz
uz(0.25)
        Adding an input of 0.3 mm results in the same percolation value
        (which, in the given example, is determined by the maximum percolation
        rate only), but in an increased value of the direct response (which
        always depends on the actual upper zone storage directly):
>>> fluxes.inuz = 0.3
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.64)
>>> states.uz
uz(0.16)
Due to the same reasons, another increase in numerical accuracy has
no impact on percolation but decreases the direct response in the
given example:
>>> recstep(200)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.421708)
>>> states.uz
uz(0.378292)
"""
con = self.parameters.control.fastaccess
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
sta = self.sequences.states.fastaccess
flu.perc = 0.
flu.q0 = 0.
for dummy in range(con.recstep):
# First state update related to the upper zone input.
sta.uz += der.dt*flu.inuz
# Second state update related to percolation.
d_perc = min(der.dt*con.percmax*flu.contriarea, sta.uz)
sta.uz -= d_perc
flu.perc += d_perc
# Third state update related to fast runoff response.
if sta.uz > 0.:
if flu.contriarea > 0.:
d_q0 = (der.dt*con.k *
(sta.uz/flu.contriarea)**(1.+con.alpha))
d_q0 = min(d_q0, sta.uz)
else:
d_q0 = sta.uz
sta.uz -= d_q0
flu.q0 += d_q0
else:
d_q0 = 0.
|
def function[calc_q0_perc_uz_v1, parameter[self]]:
constant[Perform the upper zone layer routine which determines percolation
to the lower zone layer and the fast response of the hland model.
Note that the system behaviour of this method depends strongly on the
specifications of the options |RespArea| and |RecStep|.
Required control parameters:
|RecStep|
|PercMax|
|K|
|Alpha|
Required derived parameters:
|DT|
Required fluxes sequence:
|InUZ|
Calculated fluxes sequences:
|Perc|
|Q0|
Updated state sequence:
|UZ|
Basic equations:
:math:`\frac{dUZ}{dt} = InUZ - Perc - Q0`
:math:`Perc = PercMax \cdot ContriArea`
    :math:`Q0 = K \cdot \left( \frac{UZ}{ContriArea} \right)^{1+Alpha}`
Examples:
The upper zone layer routine is an exception compared to
the other routines of the HydPy-H-Land model, regarding its
consideration of numerical accuracy. To increase the accuracy of
the numerical integration of the underlying ordinary differential
equation, each simulation step can be divided into substeps, which
are all solved with first order accuracy. In the first example,
this option is omitted through setting the RecStep parameter to one:
>>> from hydpy.models.hland import *
>>> parameterstep('1d')
>>> simulationstep('12h')
>>> recstep(2)
>>> derived.dt = 1/recstep
>>> percmax(2.0)
>>> alpha(1.0)
>>> k(2.0)
>>> fluxes.contriarea = 1.0
>>> fluxes.inuz = 0.0
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(1.0)
>>> fluxes.q0
q0(0.0)
>>> states.uz
uz(0.0)
Due to the sequential calculation of the upper zone routine, the
upper zone storage is drained completely through percolation and
no water is left for fast discharge response. By dividing the
        simulation step into 100 substeps, the results are quite different:
>>> recstep(200)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.786934)
>>> fluxes.q0
q0(0.213066)
>>> states.uz
uz(0.0)
Note that the assumed length of the simulation step is only a
half day. Hence the effective values of the maximum percolation
        rate and the storage coefficient are not 2 but 1:
>>> percmax
percmax(2.0)
>>> k
k(2.0)
>>> percmax.value
1.0
>>> k.value
1.0
By decreasing the contributing area one decreases percolation but
increases fast discharge response:
>>> fluxes.contriarea = 0.5
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.434108)
>>> fluxes.q0
q0(0.565892)
>>> states.uz
uz(0.0)
Resetting RecStep leads to more transparent results. Note that, due
to the large value of the storage coefficient and the low accuracy
of the numerical approximation, direct discharge drains the rest of
the upper zone storage:
>>> recstep(2)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.5)
>>> states.uz
uz(0.0)
Applying a more reasonable storage coefficient results in:
>>> k(0.5)
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.25)
>>> states.uz
uz(0.25)
        Adding an input of 0.3 mm results in the same percolation value
        (which, in the given example, is determined by the maximum percolation
        rate only), but in an increased value of the direct response (which
        always depends on the actual upper zone storage directly):
>>> fluxes.inuz = 0.3
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.64)
>>> states.uz
uz(0.16)
Due to the same reasons, another increase in numerical accuracy has
no impact on percolation but decreases the direct response in the
given example:
>>> recstep(200)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.421708)
>>> states.uz
uz(0.378292)
]
variable[con] assign[=] name[self].parameters.control.fastaccess
variable[der] assign[=] name[self].parameters.derived.fastaccess
variable[flu] assign[=] name[self].sequences.fluxes.fastaccess
variable[sta] assign[=] name[self].sequences.states.fastaccess
name[flu].perc assign[=] constant[0.0]
name[flu].q0 assign[=] constant[0.0]
for taget[name[dummy]] in starred[call[name[range], parameter[name[con].recstep]]] begin[:]
<ast.AugAssign object at 0x7da2044c1690>
variable[d_perc] assign[=] call[name[min], parameter[binary_operation[binary_operation[name[der].dt * name[con].percmax] * name[flu].contriarea], name[sta].uz]]
<ast.AugAssign object at 0x7da18bccae00>
<ast.AugAssign object at 0x7da18bccbf10>
if compare[name[sta].uz greater[>] constant[0.0]] begin[:]
if compare[name[flu].contriarea greater[>] constant[0.0]] begin[:]
variable[d_q0] assign[=] binary_operation[binary_operation[name[der].dt * name[con].k] * binary_operation[binary_operation[name[sta].uz / name[flu].contriarea] ** binary_operation[constant[1.0] + name[con].alpha]]]
variable[d_q0] assign[=] call[name[min], parameter[name[d_q0], name[sta].uz]]
<ast.AugAssign object at 0x7da18bcc9c00>
<ast.AugAssign object at 0x7da18bcca9b0>
|
keyword[def] identifier[calc_q0_perc_uz_v1] ( identifier[self] ):
literal[string]
identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess]
identifier[der] = identifier[self] . identifier[parameters] . identifier[derived] . identifier[fastaccess]
identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess]
identifier[sta] = identifier[self] . identifier[sequences] . identifier[states] . identifier[fastaccess]
identifier[flu] . identifier[perc] = literal[int]
identifier[flu] . identifier[q0] = literal[int]
keyword[for] identifier[dummy] keyword[in] identifier[range] ( identifier[con] . identifier[recstep] ):
identifier[sta] . identifier[uz] += identifier[der] . identifier[dt] * identifier[flu] . identifier[inuz]
identifier[d_perc] = identifier[min] ( identifier[der] . identifier[dt] * identifier[con] . identifier[percmax] * identifier[flu] . identifier[contriarea] , identifier[sta] . identifier[uz] )
identifier[sta] . identifier[uz] -= identifier[d_perc]
identifier[flu] . identifier[perc] += identifier[d_perc]
keyword[if] identifier[sta] . identifier[uz] > literal[int] :
keyword[if] identifier[flu] . identifier[contriarea] > literal[int] :
identifier[d_q0] =( identifier[der] . identifier[dt] * identifier[con] . identifier[k] *
( identifier[sta] . identifier[uz] / identifier[flu] . identifier[contriarea] )**( literal[int] + identifier[con] . identifier[alpha] ))
identifier[d_q0] = identifier[min] ( identifier[d_q0] , identifier[sta] . identifier[uz] )
keyword[else] :
identifier[d_q0] = identifier[sta] . identifier[uz]
identifier[sta] . identifier[uz] -= identifier[d_q0]
identifier[flu] . identifier[q0] += identifier[d_q0]
keyword[else] :
identifier[d_q0] = literal[int]
|
def calc_q0_perc_uz_v1(self):
"""Perform the upper zone layer routine which determines percolation
to the lower zone layer and the fast response of the hland model.
Note that the system behaviour of this method depends strongly on the
specifications of the options |RespArea| and |RecStep|.
Required control parameters:
|RecStep|
|PercMax|
|K|
|Alpha|
Required derived parameters:
|DT|
Required fluxes sequence:
|InUZ|
Calculated fluxes sequences:
|Perc|
|Q0|
Updated state sequence:
|UZ|
Basic equations:
:math:`\\frac{dUZ}{dt} = InUZ - Perc - Q0`
:math:`Perc = PercMax \\cdot ContriArea`
    :math:`Q0 = K \\cdot \\left( \\frac{UZ}{ContriArea} \\right)^{1+Alpha}`
Examples:
The upper zone layer routine is an exception compared to
the other routines of the HydPy-H-Land model, regarding its
consideration of numerical accuracy. To increase the accuracy of
the numerical integration of the underlying ordinary differential
equation, each simulation step can be divided into substeps, which
are all solved with first order accuracy. In the first example,
this option is omitted through setting the RecStep parameter to one:
>>> from hydpy.models.hland import *
>>> parameterstep('1d')
>>> simulationstep('12h')
>>> recstep(2)
>>> derived.dt = 1/recstep
>>> percmax(2.0)
>>> alpha(1.0)
>>> k(2.0)
>>> fluxes.contriarea = 1.0
>>> fluxes.inuz = 0.0
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(1.0)
>>> fluxes.q0
q0(0.0)
>>> states.uz
uz(0.0)
Due to the sequential calculation of the upper zone routine, the
upper zone storage is drained completely through percolation and
no water is left for fast discharge response. By dividing the
        simulation step into 100 substeps, the results are quite different:
>>> recstep(200)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.786934)
>>> fluxes.q0
q0(0.213066)
>>> states.uz
uz(0.0)
Note that the assumed length of the simulation step is only a
half day. Hence the effective values of the maximum percolation
        rate and the storage coefficient are not 2 but 1:
>>> percmax
percmax(2.0)
>>> k
k(2.0)
>>> percmax.value
1.0
>>> k.value
1.0
By decreasing the contributing area one decreases percolation but
increases fast discharge response:
>>> fluxes.contriarea = 0.5
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.434108)
>>> fluxes.q0
q0(0.565892)
>>> states.uz
uz(0.0)
Resetting RecStep leads to more transparent results. Note that, due
to the large value of the storage coefficient and the low accuracy
of the numerical approximation, direct discharge drains the rest of
the upper zone storage:
>>> recstep(2)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.5)
>>> states.uz
uz(0.0)
Applying a more reasonable storage coefficient results in:
>>> k(0.5)
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.25)
>>> states.uz
uz(0.25)
        Adding an input of 0.3 mm results in the same percolation value
        (which, in the given example, is determined by the maximum percolation
        rate only), but in an increased value of the direct response (which
        always depends on the actual upper zone storage directly):
>>> fluxes.inuz = 0.3
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.64)
>>> states.uz
uz(0.16)
Due to the same reasons, another increase in numerical accuracy has
no impact on percolation but decreases the direct response in the
given example:
>>> recstep(200)
>>> derived.dt = 1.0/recstep
>>> states.uz = 1.0
>>> model.calc_q0_perc_uz_v1()
>>> fluxes.perc
perc(0.5)
>>> fluxes.q0
q0(0.421708)
>>> states.uz
uz(0.378292)
"""
con = self.parameters.control.fastaccess
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
sta = self.sequences.states.fastaccess
flu.perc = 0.0
flu.q0 = 0.0
for dummy in range(con.recstep):
# First state update related to the upper zone input.
sta.uz += der.dt * flu.inuz
# Second state update related to percolation.
d_perc = min(der.dt * con.percmax * flu.contriarea, sta.uz)
sta.uz -= d_perc
flu.perc += d_perc
# Third state update related to fast runoff response.
if sta.uz > 0.0:
if flu.contriarea > 0.0:
d_q0 = der.dt * con.k * (sta.uz / flu.contriarea) ** (1.0 + con.alpha)
d_q0 = min(d_q0, sta.uz) # depends on [control=['if'], data=[]]
else:
d_q0 = sta.uz
sta.uz -= d_q0
flu.q0 += d_q0 # depends on [control=['if'], data=[]]
else:
d_q0 = 0.0 # depends on [control=['for'], data=[]]
|
def lookup_ip(self, mac):
"""Look for a lease object with given mac address and return the
assigned ip address.
@type mac: str
@rtype: str or None
@raises ValueError:
@raises OmapiError:
@raises OmapiErrorNotFound: if no lease object with the given mac could be found
    @raises OmapiErrorAttributeNotFound: if the lease could be found, but the object lacks an ip
@raises socket.error:
"""
res = self.lookup_by_lease(mac=mac)
try:
return res["ip-address"]
except KeyError:
raise OmapiErrorAttributeNotFound()
|
def function[lookup_ip, parameter[self, mac]]:
constant[Look for a lease object with given mac address and return the
assigned ip address.
@type mac: str
@rtype: str or None
@raises ValueError:
@raises OmapiError:
@raises OmapiErrorNotFound: if no lease object with the given mac could be found
    @raises OmapiErrorAttributeNotFound: if the lease could be found, but the object lacks an ip
@raises socket.error:
]
variable[res] assign[=] call[name[self].lookup_by_lease, parameter[]]
<ast.Try object at 0x7da18ede5e40>
|
keyword[def] identifier[lookup_ip] ( identifier[self] , identifier[mac] ):
literal[string]
identifier[res] = identifier[self] . identifier[lookup_by_lease] ( identifier[mac] = identifier[mac] )
keyword[try] :
keyword[return] identifier[res] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[OmapiErrorAttributeNotFound] ()
|
def lookup_ip(self, mac):
"""Look for a lease object with given mac address and return the
assigned ip address.
@type mac: str
@rtype: str or None
@raises ValueError:
@raises OmapiError:
@raises OmapiErrorNotFound: if no lease object with the given mac could be found
    @raises OmapiErrorAttributeNotFound: if the lease could be found, but the object lacks an ip
@raises socket.error:
"""
res = self.lookup_by_lease(mac=mac)
try:
return res['ip-address'] # depends on [control=['try'], data=[]]
except KeyError:
raise OmapiErrorAttributeNotFound() # depends on [control=['except'], data=[]]
|
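A hedged usage sketch for lookup_ip, assuming it lives on pypureomapi's Omapi client; the host, port, and mac below are placeholders, and a real server normally also requires an OMAPI authentication key.
from pypureomapi import Omapi, OmapiErrorNotFound, OmapiErrorAttributeNotFound

omapi = Omapi('127.0.0.1', 7911)                  # placeholder server endpoint
try:
    print(omapi.lookup_ip('00:11:22:33:44:55'))   # e.g. '192.0.2.10'
except OmapiErrorNotFound:
    print('no lease for that mac')
except OmapiErrorAttributeNotFound:
    print('lease exists but carries no ip-address attribute')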
def update_feature(self, dataset, fid, feature):
"""Inserts or updates a feature in a dataset.
Parameters
----------
dataset : str
The dataset id.
fid : str
The feature id.
If the dataset has no feature with the given feature id,
then a new feature will be created.
feature : dict
The GeoJSON feature object.
This should be one individual GeoJSON feature and not a
GeoJSON FeatureCollection.
Returns
-------
    requests.Response
The response contains a GeoJSON representation of the new or updated feature.
"""
uri = URITemplate(
self.baseuri + '/{owner}/{did}/features/{fid}').expand(
owner=self.username, did=dataset, fid=fid)
return self.session.put(uri, json=feature)
|
def function[update_feature, parameter[self, dataset, fid, feature]]:
constant[Inserts or updates a feature in a dataset.
Parameters
----------
dataset : str
The dataset id.
fid : str
The feature id.
If the dataset has no feature with the given feature id,
then a new feature will be created.
feature : dict
The GeoJSON feature object.
This should be one individual GeoJSON feature and not a
GeoJSON FeatureCollection.
Returns
-------
    requests.Response
The response contains a GeoJSON representation of the new or updated feature.
]
variable[uri] assign[=] call[call[name[URITemplate], parameter[binary_operation[name[self].baseuri + constant[/{owner}/{did}/features/{fid}]]]].expand, parameter[]]
return[call[name[self].session.put, parameter[name[uri]]]]
|
keyword[def] identifier[update_feature] ( identifier[self] , identifier[dataset] , identifier[fid] , identifier[feature] ):
literal[string]
identifier[uri] = identifier[URITemplate] (
identifier[self] . identifier[baseuri] + literal[string] ). identifier[expand] (
identifier[owner] = identifier[self] . identifier[username] , identifier[did] = identifier[dataset] , identifier[fid] = identifier[fid] )
keyword[return] identifier[self] . identifier[session] . identifier[put] ( identifier[uri] , identifier[json] = identifier[feature] )
|
def update_feature(self, dataset, fid, feature):
"""Inserts or updates a feature in a dataset.
Parameters
----------
dataset : str
The dataset id.
fid : str
The feature id.
If the dataset has no feature with the given feature id,
then a new feature will be created.
feature : dict
The GeoJSON feature object.
This should be one individual GeoJSON feature and not a
GeoJSON FeatureCollection.
Returns
-------
    requests.Response
The response contains a GeoJSON representation of the new or updated feature.
"""
uri = URITemplate(self.baseuri + '/{owner}/{did}/features/{fid}').expand(owner=self.username, did=dataset, fid=fid)
return self.session.put(uri, json=feature)
|
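A usage sketch for update_feature under the assumption that this is the Mapbox Datasets service wrapper; the dataset id, feature id, and feature body are placeholders, and datasets stands for an already-authenticated instance of the class above.
feature = {
    'type': 'Feature',
    'geometry': {'type': 'Point', 'coordinates': [-77.03, 38.91]},
    'properties': {'name': 'example point'},
}
resp = datasets.update_feature('dataset-id-here', 'feature-1', feature)
print(resp.status_code, resp.json())   # stored GeoJSON feature on success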
def parse_rst_content(content, state):
"""Parse rST-formatted string content into docutils nodes
Parameters
----------
content : `str`
ReStructuredText-formatted content
state : ``docutils.statemachine.State``
Usually the directive's ``state`` attribute.
Returns
-------
instance from ``docutils.nodes``
Docutils node representing the ``content``.
"""
# http://www.sphinx-doc.org/en/master/extdev/markupapi.html
# #parsing-directive-content-as-rest
container_node = nodes.section()
container_node.document = state.document
viewlist = ViewList()
for i, line in enumerate(content.splitlines()):
viewlist.append(line, source='', offset=i)
with switch_source_input(state, viewlist):
state.nested_parse(viewlist, 0, container_node)
return container_node.children
|
def function[parse_rst_content, parameter[content, state]]:
constant[Parse rST-formatted string content into docutils nodes
Parameters
----------
content : `str`
ReStructuredText-formatted content
state : ``docutils.statemachine.State``
Usually the directive's ``state`` attribute.
Returns
-------
instance from ``docutils.nodes``
Docutils node representing the ``content``.
]
variable[container_node] assign[=] call[name[nodes].section, parameter[]]
name[container_node].document assign[=] name[state].document
variable[viewlist] assign[=] call[name[ViewList], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b237f4f0>, <ast.Name object at 0x7da1b237f790>]]] in starred[call[name[enumerate], parameter[call[name[content].splitlines, parameter[]]]]] begin[:]
call[name[viewlist].append, parameter[name[line]]]
with call[name[switch_source_input], parameter[name[state], name[viewlist]]] begin[:]
call[name[state].nested_parse, parameter[name[viewlist], constant[0], name[container_node]]]
return[name[container_node].children]
|
keyword[def] identifier[parse_rst_content] ( identifier[content] , identifier[state] ):
literal[string]
identifier[container_node] = identifier[nodes] . identifier[section] ()
identifier[container_node] . identifier[document] = identifier[state] . identifier[document]
identifier[viewlist] = identifier[ViewList] ()
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[content] . identifier[splitlines] ()):
identifier[viewlist] . identifier[append] ( identifier[line] , identifier[source] = literal[string] , identifier[offset] = identifier[i] )
keyword[with] identifier[switch_source_input] ( identifier[state] , identifier[viewlist] ):
identifier[state] . identifier[nested_parse] ( identifier[viewlist] , literal[int] , identifier[container_node] )
keyword[return] identifier[container_node] . identifier[children]
|
def parse_rst_content(content, state):
"""Parse rST-formatted string content into docutils nodes
Parameters
----------
content : `str`
ReStructuredText-formatted content
state : ``docutils.statemachine.State``
Usually the directive's ``state`` attribute.
Returns
-------
instance from ``docutils.nodes``
Docutils node representing the ``content``.
"""
# http://www.sphinx-doc.org/en/master/extdev/markupapi.html
# #parsing-directive-content-as-rest
container_node = nodes.section()
container_node.document = state.document
viewlist = ViewList()
for (i, line) in enumerate(content.splitlines()):
viewlist.append(line, source='', offset=i) # depends on [control=['for'], data=[]]
with switch_source_input(state, viewlist):
state.nested_parse(viewlist, 0, container_node) # depends on [control=['with'], data=[]]
return container_node.children
|
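A hedged sketch of calling parse_rst_content from a Sphinx directive's run() method, which is where a state attribute of the right type is naturally available; ExampleDirective is hypothetical.
from docutils.parsers.rst import Directive

class ExampleDirective(Directive):
    def run(self):
        # Feed generated rST back through the directive's own state machine.
        return parse_rst_content('**bold** text\n\n* a bullet item', self.state)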
def remove(self):
'''
a method to remove collection and all records in the collection
:return: string with confirmation of deletion
'''
title = '%s.remove' % self.__class__.__name__
# request bucket delete
self.s3.delete_bucket(self.bucket_name)
# return confirmation
exit_msg = '%s collection has been removed from S3.' % self.bucket_name
return exit_msg
|
def function[remove, parameter[self]]:
constant[
        a method to remove the collection and all records in the collection
:return: string with confirmation of deletion
]
variable[title] assign[=] binary_operation[constant[%s.remove] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__]
call[name[self].s3.delete_bucket, parameter[name[self].bucket_name]]
variable[exit_msg] assign[=] binary_operation[constant[%s collection has been removed from S3.] <ast.Mod object at 0x7da2590d6920> name[self].bucket_name]
return[name[exit_msg]]
|
keyword[def] identifier[remove] ( identifier[self] ):
literal[string]
identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__]
identifier[self] . identifier[s3] . identifier[delete_bucket] ( identifier[self] . identifier[bucket_name] )
identifier[exit_msg] = literal[string] % identifier[self] . identifier[bucket_name]
keyword[return] identifier[exit_msg]
|
def remove(self):
"""
        a method to remove the collection and all records in the collection
:return: string with confirmation of deletion
"""
title = '%s.remove' % self.__class__.__name__ # request bucket delete
self.s3.delete_bucket(self.bucket_name)
# return confirmation
exit_msg = '%s collection has been removed from S3.' % self.bucket_name
return exit_msg
|
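A one-line usage sketch for remove, assuming collection is an instance of the wrapper above with its s3 client and bucket_name already configured (with plain boto3, the bucket must also be empty before delete_bucket succeeds):
print(collection.remove())   # '<bucket_name> collection has been removed from S3.'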
def is_datetime64_ns_dtype(arr_or_dtype):
"""
Check whether the provided array or dtype is of the datetime64[ns] dtype.
Parameters
----------
arr_or_dtype : array-like
The array or dtype to check.
Returns
-------
boolean
Whether or not the array or dtype is of the datetime64[ns] dtype.
Examples
--------
>>> is_datetime64_ns_dtype(str)
False
>>> is_datetime64_ns_dtype(int)
False
>>> is_datetime64_ns_dtype(np.datetime64) # no unit
False
>>> is_datetime64_ns_dtype(DatetimeTZDtype("ns", "US/Eastern"))
True
>>> is_datetime64_ns_dtype(np.array(['a', 'b']))
False
>>> is_datetime64_ns_dtype(np.array([1, 2]))
False
>>> is_datetime64_ns_dtype(np.array([], dtype=np.datetime64)) # no unit
False
>>> is_datetime64_ns_dtype(np.array([],
dtype="datetime64[ps]")) # wrong unit
False
>>> is_datetime64_ns_dtype(pd.DatetimeIndex([1, 2, 3],
dtype=np.datetime64)) # has 'ns' unit
True
"""
if arr_or_dtype is None:
return False
try:
tipo = _get_dtype(arr_or_dtype)
except TypeError:
if is_datetime64tz_dtype(arr_or_dtype):
tipo = _get_dtype(arr_or_dtype.dtype)
else:
return False
return tipo == _NS_DTYPE or getattr(tipo, 'base', None) == _NS_DTYPE
|
def function[is_datetime64_ns_dtype, parameter[arr_or_dtype]]:
constant[
Check whether the provided array or dtype is of the datetime64[ns] dtype.
Parameters
----------
arr_or_dtype : array-like
The array or dtype to check.
Returns
-------
boolean
Whether or not the array or dtype is of the datetime64[ns] dtype.
Examples
--------
>>> is_datetime64_ns_dtype(str)
False
>>> is_datetime64_ns_dtype(int)
False
>>> is_datetime64_ns_dtype(np.datetime64) # no unit
False
>>> is_datetime64_ns_dtype(DatetimeTZDtype("ns", "US/Eastern"))
True
>>> is_datetime64_ns_dtype(np.array(['a', 'b']))
False
>>> is_datetime64_ns_dtype(np.array([1, 2]))
False
>>> is_datetime64_ns_dtype(np.array([], dtype=np.datetime64)) # no unit
False
>>> is_datetime64_ns_dtype(np.array([],
dtype="datetime64[ps]")) # wrong unit
False
>>> is_datetime64_ns_dtype(pd.DatetimeIndex([1, 2, 3],
dtype=np.datetime64)) # has 'ns' unit
True
]
if compare[name[arr_or_dtype] is constant[None]] begin[:]
return[constant[False]]
<ast.Try object at 0x7da1b20282e0>
return[<ast.BoolOp object at 0x7da1b202a650>]
|
keyword[def] identifier[is_datetime64_ns_dtype] ( identifier[arr_or_dtype] ):
literal[string]
keyword[if] identifier[arr_or_dtype] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[try] :
identifier[tipo] = identifier[_get_dtype] ( identifier[arr_or_dtype] )
keyword[except] identifier[TypeError] :
keyword[if] identifier[is_datetime64tz_dtype] ( identifier[arr_or_dtype] ):
identifier[tipo] = identifier[_get_dtype] ( identifier[arr_or_dtype] . identifier[dtype] )
keyword[else] :
keyword[return] keyword[False]
keyword[return] identifier[tipo] == identifier[_NS_DTYPE] keyword[or] identifier[getattr] ( identifier[tipo] , literal[string] , keyword[None] )== identifier[_NS_DTYPE]
|
def is_datetime64_ns_dtype(arr_or_dtype):
"""
Check whether the provided array or dtype is of the datetime64[ns] dtype.
Parameters
----------
arr_or_dtype : array-like
The array or dtype to check.
Returns
-------
boolean
Whether or not the array or dtype is of the datetime64[ns] dtype.
Examples
--------
>>> is_datetime64_ns_dtype(str)
False
>>> is_datetime64_ns_dtype(int)
False
>>> is_datetime64_ns_dtype(np.datetime64) # no unit
False
>>> is_datetime64_ns_dtype(DatetimeTZDtype("ns", "US/Eastern"))
True
>>> is_datetime64_ns_dtype(np.array(['a', 'b']))
False
>>> is_datetime64_ns_dtype(np.array([1, 2]))
False
>>> is_datetime64_ns_dtype(np.array([], dtype=np.datetime64)) # no unit
False
>>> is_datetime64_ns_dtype(np.array([],
dtype="datetime64[ps]")) # wrong unit
False
>>> is_datetime64_ns_dtype(pd.DatetimeIndex([1, 2, 3],
dtype=np.datetime64)) # has 'ns' unit
True
"""
if arr_or_dtype is None:
return False # depends on [control=['if'], data=[]]
try:
tipo = _get_dtype(arr_or_dtype) # depends on [control=['try'], data=[]]
except TypeError:
if is_datetime64tz_dtype(arr_or_dtype):
tipo = _get_dtype(arr_or_dtype.dtype) # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['except'], data=[]]
return tipo == _NS_DTYPE or getattr(tipo, 'base', None) == _NS_DTYPE
|
def assign(self, droplet_id):
"""
Assign the floating IP to a droplet
:param droplet_id: the droplet to assign the floating IP to as either
an ID or a `Droplet` object
:type droplet_id: integer or `Droplet`
:return: an `Action` representing the in-progress operation on the
floating IP
:rtype: Action
:raises DOAPIError: if the API endpoint replies with an error
"""
if isinstance(droplet_id, Droplet):
droplet_id = droplet_id.id
return self.act(type='assign', droplet_id=droplet_id)
|
def function[assign, parameter[self, droplet_id]]:
constant[
Assign the floating IP to a droplet
:param droplet_id: the droplet to assign the floating IP to as either
an ID or a `Droplet` object
:type droplet_id: integer or `Droplet`
:return: an `Action` representing the in-progress operation on the
floating IP
:rtype: Action
:raises DOAPIError: if the API endpoint replies with an error
]
if call[name[isinstance], parameter[name[droplet_id], name[Droplet]]] begin[:]
variable[droplet_id] assign[=] name[droplet_id].id
return[call[name[self].act, parameter[]]]
|
keyword[def] identifier[assign] ( identifier[self] , identifier[droplet_id] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[droplet_id] , identifier[Droplet] ):
identifier[droplet_id] = identifier[droplet_id] . identifier[id]
keyword[return] identifier[self] . identifier[act] ( identifier[type] = literal[string] , identifier[droplet_id] = identifier[droplet_id] )
|
def assign(self, droplet_id):
"""
Assign the floating IP to a droplet
:param droplet_id: the droplet to assign the floating IP to as either
an ID or a `Droplet` object
:type droplet_id: integer or `Droplet`
:return: an `Action` representing the in-progress operation on the
floating IP
:rtype: Action
:raises DOAPIError: if the API endpoint replies with an error
"""
if isinstance(droplet_id, Droplet):
droplet_id = droplet_id.id # depends on [control=['if'], data=[]]
return self.act(type='assign', droplet_id=droplet_id)
|
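A hedged usage sketch for assign, assuming a doapi FloatingIP instance; the droplet id is a placeholder, and either a raw id or a Droplet object is accepted, as the isinstance check above shows.
action = floating_ip.assign(12345)   # raw droplet id
# ...or pass a Droplet object; its .id is extracted automatically:
action = floating_ip.assign(droplet)
print(action)                        # the in-progress Action for the assignment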
def is_alert_present(self):
"""Tests if an alert is present
@return: True if alert is present, False otherwise
"""
current_frame = None
try:
current_frame = self.driver.current_window_handle
a = self.driver.switch_to_alert()
a.text
except NoAlertPresentException:
# No alert
return False
except UnexpectedAlertPresentException:
# Alert exists
return True
finally:
if current_frame:
self.driver.switch_to_window(current_frame)
return True
|
def function[is_alert_present, parameter[self]]:
constant[Tests if an alert is present
@return: True if alert is present, False otherwise
]
variable[current_frame] assign[=] constant[None]
<ast.Try object at 0x7da1b1075960>
return[constant[True]]
|
keyword[def] identifier[is_alert_present] ( identifier[self] ):
literal[string]
identifier[current_frame] = keyword[None]
keyword[try] :
identifier[current_frame] = identifier[self] . identifier[driver] . identifier[current_window_handle]
identifier[a] = identifier[self] . identifier[driver] . identifier[switch_to_alert] ()
identifier[a] . identifier[text]
keyword[except] identifier[NoAlertPresentException] :
keyword[return] keyword[False]
keyword[except] identifier[UnexpectedAlertPresentException] :
keyword[return] keyword[True]
keyword[finally] :
keyword[if] identifier[current_frame] :
identifier[self] . identifier[driver] . identifier[switch_to_window] ( identifier[current_frame] )
keyword[return] keyword[True]
|
def is_alert_present(self):
"""Tests if an alert is present
@return: True if alert is present, False otherwise
"""
current_frame = None
try:
current_frame = self.driver.current_window_handle
a = self.driver.switch_to_alert()
a.text # depends on [control=['try'], data=[]]
except NoAlertPresentException:
# No alert
return False # depends on [control=['except'], data=[]]
except UnexpectedAlertPresentException:
# Alert exists
return True # depends on [control=['except'], data=[]]
finally:
if current_frame:
self.driver.switch_to_window(current_frame) # depends on [control=['if'], data=[]]
return True
|
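A short usage sketch for is_alert_present, assuming page is an object exposing the method and a Selenium driver attribute:
if page.is_alert_present():
    # Safe to interact with the alert now; accept() dismisses it.
    page.driver.switch_to_alert().accept()
else:
    print('no alert on screen')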
def create_brand(cls, brand, **kwargs):
"""Create Brand
Create a new Brand
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_brand(brand, async=True)
>>> result = thread.get()
:param async bool
:param Brand brand: Attributes of brand to create (required)
:return: Brand
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_brand_with_http_info(brand, **kwargs)
else:
(data) = cls._create_brand_with_http_info(brand, **kwargs)
return data
|
def function[create_brand, parameter[cls, brand]]:
constant[Create Brand
Create a new Brand
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_brand(brand, async=True)
>>> result = thread.get()
:param async bool
:param Brand brand: Attributes of brand to create (required)
:return: Brand
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._create_brand_with_http_info, parameter[name[brand]]]]
|
keyword[def] identifier[create_brand] ( identifier[cls] , identifier[brand] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_create_brand_with_http_info] ( identifier[brand] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_create_brand_with_http_info] ( identifier[brand] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def create_brand(cls, brand, **kwargs):
"""Create Brand
Create a new Brand
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_brand(brand, async=True)
>>> result = thread.get()
:param async bool
:param Brand brand: Attributes of brand to create (required)
:return: Brand
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_brand_with_http_info(brand, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._create_brand_with_http_info(brand, **kwargs)
return data
|
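A hedged usage sketch for create_brand on a swagger-generated client like the one above; BrandApi stands for whatever class hosts the classmethod and Brand(name=...) is an assumed constructor. Note that async became a reserved word in Python 3.7, so on modern interpreters the flag must be passed via dict unpacking rather than as a literal keyword.
brand = Brand(name='Acme')                                  # assumed model constructor
created = BrandApi.create_brand(brand)                      # synchronous: returns the Brand
thread = BrandApi.create_brand(brand, **{'async': True})    # async form returns a thread
print(thread.get())                                         # block until the result arrives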
def display_user(value, arg):
    ''' Return 'You' if value's user is equal to arg (and arg is not anonymous).
Parameters:
value should be a userprofile
arg should be another user.
Ideally, value should be a userprofile from an object and arg the user logged in.
'''
if value.user == arg and arg.username != ANONYMOUS_USERNAME:
return "You"
else:
return value.user.get_full_name()
|
def function[display_user, parameter[value, arg]]:
    constant[ Return 'You' if value's user is equal to arg (and arg is not anonymous).
Parameters:
value should be a userprofile
arg should be another user.
Ideally, value should be a userprofile from an object and arg the user logged in.
]
if <ast.BoolOp object at 0x7da1b2346dd0> begin[:]
return[constant[You]]
|
keyword[def] identifier[display_user] ( identifier[value] , identifier[arg] ):
literal[string]
keyword[if] identifier[value] . identifier[user] == identifier[arg] keyword[and] identifier[arg] . identifier[username] != identifier[ANONYMOUS_USERNAME] :
keyword[return] literal[string]
keyword[else] :
keyword[return] identifier[value] . identifier[user] . identifier[get_full_name] ()
|
def display_user(value, arg):
""" Return 'You' if value is equal to arg.
Parameters:
value should be a userprofile
arg should be another user.
Ideally, value should be a userprofile from an object and arg the user logged in.
"""
if value.user == arg and arg.username != ANONYMOUS_USERNAME:
return 'You' # depends on [control=['if'], data=[]]
else:
return value.user.get_full_name()
|
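A sketch of registering and using the display_user filter, assuming the usual Django template library setup; the template variable names are placeholders.
from django import template

register = template.Library()
display_user = register.filter(display_user)   # expose the function above as a filter

# Template usage (profile is a UserProfile, user the logged-in request user):
#   {{ profile|display_user:user }}
# renders "You" for the owner and the full name for anyone else.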
def set_issuer(self, issuer):
"""
Set the issuer of this certificate.
:param issuer: The issuer.
:type issuer: :py:class:`X509Name`
:return: ``None``
"""
self._set_name(_lib.X509_set_issuer_name, issuer)
self._issuer_invalidator.clear()
|
def function[set_issuer, parameter[self, issuer]]:
constant[
Set the issuer of this certificate.
:param issuer: The issuer.
:type issuer: :py:class:`X509Name`
:return: ``None``
]
call[name[self]._set_name, parameter[name[_lib].X509_set_issuer_name, name[issuer]]]
call[name[self]._issuer_invalidator.clear, parameter[]]
|
keyword[def] identifier[set_issuer] ( identifier[self] , identifier[issuer] ):
literal[string]
identifier[self] . identifier[_set_name] ( identifier[_lib] . identifier[X509_set_issuer_name] , identifier[issuer] )
identifier[self] . identifier[_issuer_invalidator] . identifier[clear] ()
|
def set_issuer(self, issuer):
"""
Set the issuer of this certificate.
:param issuer: The issuer.
:type issuer: :py:class:`X509Name`
:return: ``None``
"""
self._set_name(_lib.X509_set_issuer_name, issuer)
self._issuer_invalidator.clear()
|
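A short sketch using the public pyOpenSSL API that set_issuer backs; the CA name is a placeholder.
from OpenSSL import crypto

cert = crypto.X509()
issuer = cert.get_issuer()      # X509Name bound to this certificate
issuer.CN = 'Example CA'        # attribute access sets the common name
cert.set_issuer(issuer)         # runs the name-setting path shown above
assert cert.get_issuer().CN == 'Example CA'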
def list_required(self, type=None, service=None): # pylint: disable=redefined-builtin
"""
Displays all packages required by the current role
based on the documented services provided.
"""
from burlap.common import (
required_system_packages,
required_python_packages,
required_ruby_packages,
)
service = (service or '').strip().upper()
type = (type or '').lower().strip()
assert not type or type in PACKAGE_TYPES, 'Unknown package type: %s' % (type,)
packages_set = set()
packages = []
version = self.os_version
for _service, satchel in self.all_other_enabled_satchels.items():
_service = _service.strip().upper()
if service and service != _service:
continue
_new = []
if not type or type == SYSTEM:
#TODO:deprecated, remove
_new.extend(required_system_packages.get(
_service, {}).get((version.distro, version.release), []))
try:
_pkgs = satchel.packager_system_packages
if self.verbose:
print('pkgs:')
pprint(_pkgs, indent=4)
for _key in [(version.distro, version.release), version.distro]:
if self.verbose:
print('checking key:', _key)
if _key in _pkgs:
if self.verbose:
print('satchel %s requires:' % satchel, _pkgs[_key])
_new.extend(_pkgs[_key])
break
except AttributeError:
pass
if not type or type == PYTHON:
#TODO:deprecated, remove
_new.extend(required_python_packages.get(
_service, {}).get((version.distro, version.release), []))
try:
_pkgs = satchel.packager_python_packages
for _key in [(version.distro, version.release), version.distro]:
if _key in _pkgs:
_new.extend(_pkgs[_key])
except AttributeError:
pass
print('_new:', _new)
if not type or type == RUBY:
#TODO:deprecated, remove
_new.extend(required_ruby_packages.get(
_service, {}).get((version.distro, version.release), []))
for _ in _new:
if _ in packages_set:
continue
packages_set.add(_)
packages.append(_)
if self.verbose:
for package in sorted(packages):
print('package:', package)
return packages
|
def function[list_required, parameter[self, type, service]]:
constant[
Displays all packages required by the current role
based on the documented services provided.
]
from relative_module[burlap.common] import module[required_system_packages], module[required_python_packages], module[required_ruby_packages]
variable[service] assign[=] call[call[<ast.BoolOp object at 0x7da1b00e3ac0>.strip, parameter[]].upper, parameter[]]
variable[type] assign[=] call[call[<ast.BoolOp object at 0x7da1b00e1c30>.lower, parameter[]].strip, parameter[]]
assert[<ast.BoolOp object at 0x7da1b00ebb80>]
variable[packages_set] assign[=] call[name[set], parameter[]]
variable[packages] assign[=] list[[]]
variable[version] assign[=] name[self].os_version
for taget[tuple[[<ast.Name object at 0x7da1b00db100>, <ast.Name object at 0x7da1b00dac80>]]] in starred[call[name[self].all_other_enabled_satchels.items, parameter[]]] begin[:]
variable[_service] assign[=] call[call[name[_service].strip, parameter[]].upper, parameter[]]
if <ast.BoolOp object at 0x7da1b00d8df0> begin[:]
continue
variable[_new] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b00d8670> begin[:]
call[name[_new].extend, parameter[call[call[name[required_system_packages].get, parameter[name[_service], dictionary[[], []]]].get, parameter[tuple[[<ast.Attribute object at 0x7da1b0088c40>, <ast.Attribute object at 0x7da1b008b7f0>]], list[[]]]]]]
<ast.Try object at 0x7da1b0088c10>
if <ast.BoolOp object at 0x7da1b0088130> begin[:]
call[name[_new].extend, parameter[call[call[name[required_python_packages].get, parameter[name[_service], dictionary[[], []]]].get, parameter[tuple[[<ast.Attribute object at 0x7da1b008a3b0>, <ast.Attribute object at 0x7da1b0089c90>]], list[[]]]]]]
<ast.Try object at 0x7da1b008abf0>
call[name[print], parameter[constant[_new:], name[_new]]]
if <ast.BoolOp object at 0x7da1b0050e80> begin[:]
call[name[_new].extend, parameter[call[call[name[required_ruby_packages].get, parameter[name[_service], dictionary[[], []]]].get, parameter[tuple[[<ast.Attribute object at 0x7da1b00500a0>, <ast.Attribute object at 0x7da1b0053ca0>]], list[[]]]]]]
for taget[name[_]] in starred[name[_new]] begin[:]
if compare[name[_] in name[packages_set]] begin[:]
continue
call[name[packages_set].add, parameter[name[_]]]
call[name[packages].append, parameter[name[_]]]
if name[self].verbose begin[:]
for taget[name[package]] in starred[call[name[sorted], parameter[name[packages]]]] begin[:]
call[name[print], parameter[constant[package:], name[package]]]
return[name[packages]]
|
keyword[def] identifier[list_required] ( identifier[self] , identifier[type] = keyword[None] , identifier[service] = keyword[None] ):
literal[string]
keyword[from] identifier[burlap] . identifier[common] keyword[import] (
identifier[required_system_packages] ,
identifier[required_python_packages] ,
identifier[required_ruby_packages] ,
)
identifier[service] =( identifier[service] keyword[or] literal[string] ). identifier[strip] (). identifier[upper] ()
identifier[type] =( identifier[type] keyword[or] literal[string] ). identifier[lower] (). identifier[strip] ()
keyword[assert] keyword[not] identifier[type] keyword[or] identifier[type] keyword[in] identifier[PACKAGE_TYPES] , literal[string] %( identifier[type] ,)
identifier[packages_set] = identifier[set] ()
identifier[packages] =[]
identifier[version] = identifier[self] . identifier[os_version]
keyword[for] identifier[_service] , identifier[satchel] keyword[in] identifier[self] . identifier[all_other_enabled_satchels] . identifier[items] ():
identifier[_service] = identifier[_service] . identifier[strip] (). identifier[upper] ()
keyword[if] identifier[service] keyword[and] identifier[service] != identifier[_service] :
keyword[continue]
identifier[_new] =[]
keyword[if] keyword[not] identifier[type] keyword[or] identifier[type] == identifier[SYSTEM] :
identifier[_new] . identifier[extend] ( identifier[required_system_packages] . identifier[get] (
identifier[_service] ,{}). identifier[get] (( identifier[version] . identifier[distro] , identifier[version] . identifier[release] ),[]))
keyword[try] :
identifier[_pkgs] = identifier[satchel] . identifier[packager_system_packages]
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] )
identifier[pprint] ( identifier[_pkgs] , identifier[indent] = literal[int] )
keyword[for] identifier[_key] keyword[in] [( identifier[version] . identifier[distro] , identifier[version] . identifier[release] ), identifier[version] . identifier[distro] ]:
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] , identifier[_key] )
keyword[if] identifier[_key] keyword[in] identifier[_pkgs] :
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] % identifier[satchel] , identifier[_pkgs] [ identifier[_key] ])
identifier[_new] . identifier[extend] ( identifier[_pkgs] [ identifier[_key] ])
keyword[break]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[if] keyword[not] identifier[type] keyword[or] identifier[type] == identifier[PYTHON] :
identifier[_new] . identifier[extend] ( identifier[required_python_packages] . identifier[get] (
identifier[_service] ,{}). identifier[get] (( identifier[version] . identifier[distro] , identifier[version] . identifier[release] ),[]))
keyword[try] :
identifier[_pkgs] = identifier[satchel] . identifier[packager_python_packages]
keyword[for] identifier[_key] keyword[in] [( identifier[version] . identifier[distro] , identifier[version] . identifier[release] ), identifier[version] . identifier[distro] ]:
keyword[if] identifier[_key] keyword[in] identifier[_pkgs] :
identifier[_new] . identifier[extend] ( identifier[_pkgs] [ identifier[_key] ])
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[print] ( literal[string] , identifier[_new] )
keyword[if] keyword[not] identifier[type] keyword[or] identifier[type] == identifier[RUBY] :
identifier[_new] . identifier[extend] ( identifier[required_ruby_packages] . identifier[get] (
identifier[_service] ,{}). identifier[get] (( identifier[version] . identifier[distro] , identifier[version] . identifier[release] ),[]))
keyword[for] identifier[_] keyword[in] identifier[_new] :
keyword[if] identifier[_] keyword[in] identifier[packages_set] :
keyword[continue]
identifier[packages_set] . identifier[add] ( identifier[_] )
identifier[packages] . identifier[append] ( identifier[_] )
keyword[if] identifier[self] . identifier[verbose] :
keyword[for] identifier[package] keyword[in] identifier[sorted] ( identifier[packages] ):
identifier[print] ( literal[string] , identifier[package] )
keyword[return] identifier[packages]
|
def list_required(self, type=None, service=None): # pylint: disable=redefined-builtin
'\n Displays all packages required by the current role\n based on the documented services provided.\n '
from burlap.common import required_system_packages, required_python_packages, required_ruby_packages
service = (service or '').strip().upper()
type = (type or '').lower().strip()
assert not type or type in PACKAGE_TYPES, 'Unknown package type: %s' % (type,)
packages_set = set()
packages = []
version = self.os_version
for (_service, satchel) in self.all_other_enabled_satchels.items():
_service = _service.strip().upper()
if service and service != _service:
continue # depends on [control=['if'], data=[]]
_new = []
if not type or type == SYSTEM:
#TODO:deprecated, remove
_new.extend(required_system_packages.get(_service, {}).get((version.distro, version.release), []))
try:
_pkgs = satchel.packager_system_packages
if self.verbose:
print('pkgs:')
pprint(_pkgs, indent=4) # depends on [control=['if'], data=[]]
for _key in [(version.distro, version.release), version.distro]:
if self.verbose:
print('checking key:', _key) # depends on [control=['if'], data=[]]
if _key in _pkgs:
if self.verbose:
print('satchel %s requires:' % satchel, _pkgs[_key]) # depends on [control=['if'], data=[]]
_new.extend(_pkgs[_key])
break # depends on [control=['if'], data=['_key', '_pkgs']] # depends on [control=['for'], data=['_key']] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if not type or type == PYTHON:
#TODO:deprecated, remove
_new.extend(required_python_packages.get(_service, {}).get((version.distro, version.release), []))
try:
_pkgs = satchel.packager_python_packages
for _key in [(version.distro, version.release), version.distro]:
if _key in _pkgs:
_new.extend(_pkgs[_key]) # depends on [control=['if'], data=['_key', '_pkgs']] # depends on [control=['for'], data=['_key']] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
print('_new:', _new) # depends on [control=['if'], data=[]]
if not type or type == RUBY:
#TODO:deprecated, remove
_new.extend(required_ruby_packages.get(_service, {}).get((version.distro, version.release), [])) # depends on [control=['if'], data=[]]
for _ in _new:
if _ in packages_set:
continue # depends on [control=['if'], data=[]]
packages_set.add(_)
packages.append(_) # depends on [control=['for'], data=['_']] # depends on [control=['for'], data=[]]
if self.verbose:
for package in sorted(packages):
print('package:', package) # depends on [control=['for'], data=['package']] # depends on [control=['if'], data=[]]
return packages
|
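A usage sketch for list_required, assuming a burlap satchel instance, that SYSTEM is the lowercase string 'system', and that 'postgresql' stands in for a real enabled service:
for pkg in satchel.list_required(type='system', service='postgresql'):
    print(pkg)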
def multilingual_flatpage(request, url):
"""
Multilingual flat page view.
Models: `multilingual.flatpages.models`
Templates: Uses the template defined by the ``template_name`` field,
or `flatpages/default.html` if template_name is not defined.
Context:
flatpage
`flatpages.flatpages` object
"""
if not url.endswith('/') and settings.APPEND_SLASH:
return HttpResponseRedirect("%s/" % request.path)
if not url.startswith('/'):
url = "/" + url
f = get_object_or_404(MultilingualFlatPage, url__exact=url, sites__id__exact=settings.SITE_ID)
# If registration is required for accessing this page, and the user isn't
# logged in, redirect to the login page.
if f.registration_required and not request.user.is_authenticated():
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(request.path)
# Serve the content in the language defined by the Django translation module
# if possible else serve the default language.
f._default_language = get_language()
if f.template_name:
t = loader.select_template((f.template_name, DEFAULT_TEMPLATE))
else:
t = loader.get_template(DEFAULT_TEMPLATE)
# To avoid having to always use the "|safe" filter in flatpage templates,
# mark the title and content as already safe (since they are raw HTML
# content in the first place).
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
c = RequestContext(request, {
'flatpage': f,
})
response = HttpResponse(t.render(c))
populate_xheaders(request, response, MultilingualFlatPage, f.id)
return response
|
def function[multilingual_flatpage, parameter[request, url]]:
constant[
Multilingual flat page view.
Models: `multilingual.flatpages.models`
Templates: Uses the template defined by the ``template_name`` field,
or `flatpages/default.html` if template_name is not defined.
Context:
flatpage
`flatpages.flatpages` object
]
if <ast.BoolOp object at 0x7da20c6c4580> begin[:]
return[call[name[HttpResponseRedirect], parameter[binary_operation[constant[%s/] <ast.Mod object at 0x7da2590d6920> name[request].path]]]]
if <ast.UnaryOp object at 0x7da20c76d000> begin[:]
variable[url] assign[=] binary_operation[constant[/] + name[url]]
variable[f] assign[=] call[name[get_object_or_404], parameter[name[MultilingualFlatPage]]]
if <ast.BoolOp object at 0x7da20c76d750> begin[:]
from relative_module[django.contrib.auth.views] import module[redirect_to_login]
return[call[name[redirect_to_login], parameter[name[request].path]]]
name[f]._default_language assign[=] call[name[get_language], parameter[]]
if name[f].template_name begin[:]
variable[t] assign[=] call[name[loader].select_template, parameter[tuple[[<ast.Attribute object at 0x7da20c76ca90>, <ast.Name object at 0x7da20c76c370>]]]]
name[f].title assign[=] call[name[mark_safe], parameter[name[f].title]]
name[f].content assign[=] call[name[mark_safe], parameter[name[f].content]]
variable[c] assign[=] call[name[RequestContext], parameter[name[request], dictionary[[<ast.Constant object at 0x7da20c6c4fa0>], [<ast.Name object at 0x7da20c6c79d0>]]]]
variable[response] assign[=] call[name[HttpResponse], parameter[call[name[t].render, parameter[name[c]]]]]
call[name[populate_xheaders], parameter[name[request], name[response], name[MultilingualFlatPage], name[f].id]]
return[name[response]]
|
keyword[def] identifier[multilingual_flatpage] ( identifier[request] , identifier[url] ):
literal[string]
keyword[if] keyword[not] identifier[url] . identifier[endswith] ( literal[string] ) keyword[and] identifier[settings] . identifier[APPEND_SLASH] :
keyword[return] identifier[HttpResponseRedirect] ( literal[string] % identifier[request] . identifier[path] )
keyword[if] keyword[not] identifier[url] . identifier[startswith] ( literal[string] ):
identifier[url] = literal[string] + identifier[url]
identifier[f] = identifier[get_object_or_404] ( identifier[MultilingualFlatPage] , identifier[url__exact] = identifier[url] , identifier[sites__id__exact] = identifier[settings] . identifier[SITE_ID] )
keyword[if] identifier[f] . identifier[registration_required] keyword[and] keyword[not] identifier[request] . identifier[user] . identifier[is_authenticated] ():
keyword[from] identifier[django] . identifier[contrib] . identifier[auth] . identifier[views] keyword[import] identifier[redirect_to_login]
keyword[return] identifier[redirect_to_login] ( identifier[request] . identifier[path] )
identifier[f] . identifier[_default_language] = identifier[get_language] ()
keyword[if] identifier[f] . identifier[template_name] :
identifier[t] = identifier[loader] . identifier[select_template] (( identifier[f] . identifier[template_name] , identifier[DEFAULT_TEMPLATE] ))
keyword[else] :
identifier[t] = identifier[loader] . identifier[get_template] ( identifier[DEFAULT_TEMPLATE] )
identifier[f] . identifier[title] = identifier[mark_safe] ( identifier[f] . identifier[title] )
identifier[f] . identifier[content] = identifier[mark_safe] ( identifier[f] . identifier[content] )
identifier[c] = identifier[RequestContext] ( identifier[request] ,{
literal[string] : identifier[f] ,
})
identifier[response] = identifier[HttpResponse] ( identifier[t] . identifier[render] ( identifier[c] ))
identifier[populate_xheaders] ( identifier[request] , identifier[response] , identifier[MultilingualFlatPage] , identifier[f] . identifier[id] )
keyword[return] identifier[response]
|
def multilingual_flatpage(request, url):
"""
Multilingual flat page view.
Models: `multilingual.flatpages.models`
Templates: Uses the template defined by the ``template_name`` field,
or `flatpages/default.html` if template_name is not defined.
Context:
flatpage
`flatpages.flatpages` object
"""
if not url.endswith('/') and settings.APPEND_SLASH:
return HttpResponseRedirect('%s/' % request.path) # depends on [control=['if'], data=[]]
if not url.startswith('/'):
url = '/' + url # depends on [control=['if'], data=[]]
f = get_object_or_404(MultilingualFlatPage, url__exact=url, sites__id__exact=settings.SITE_ID)
# If registration is required for accessing this page, and the user isn't
# logged in, redirect to the login page.
if f.registration_required and (not request.user.is_authenticated()):
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(request.path) # depends on [control=['if'], data=[]]
# Serve the content in the language defined by the Django translation module
# if possible else serve the default language.
f._default_language = get_language()
if f.template_name:
t = loader.select_template((f.template_name, DEFAULT_TEMPLATE)) # depends on [control=['if'], data=[]]
else:
t = loader.get_template(DEFAULT_TEMPLATE)
# To avoid having to always use the "|safe" filter in flatpage templates,
# mark the title and content as already safe (since they are raw HTML
# content in the first place).
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
c = RequestContext(request, {'flatpage': f})
response = HttpResponse(t.render(c))
populate_xheaders(request, response, MultilingualFlatPage, f.id)
return response
|
def check_mod_enabled(mod):
'''
Checks to see if the specific mod symlink is in /etc/apache2/mods-enabled.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.check_mod_enabled status
salt '*' apache.check_mod_enabled status.load
salt '*' apache.check_mod_enabled status.conf
'''
if mod.endswith('.load') or mod.endswith('.conf'):
mod_file = mod
else:
mod_file = '{0}.load'.format(mod)
return os.path.islink('/etc/apache2/mods-enabled/{0}'.format(mod_file))
|
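A direct-call sketch for the function above; the module import is an assumption (outside Salt), and within Salt you would normally go through __salt__['apache.check_mod_enabled']:

import apache  # hypothetical: the Salt execution module shown above

for mod in ('status', 'rewrite.load', 'ssl.conf'):
    # '.load'/'.conf' suffixes are kept as-is; bare names get '.load' appended.
    print('{0} enabled: {1}'.format(mod, apache.check_mod_enabled(mod)))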
def function[check_mod_enabled, parameter[mod]]:
constant[
Checks to see if the specific mod symlink is in /etc/apache2/mods-enabled.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.check_mod_enabled status
salt '*' apache.check_mod_enabled status.load
salt '*' apache.check_mod_enabled status.conf
]
if <ast.BoolOp object at 0x7da1b1c9b430> begin[:]
variable[mod_file] assign[=] name[mod]
return[call[name[os].path.islink, parameter[call[constant[/etc/apache2/mods-enabled/{0}].format, parameter[name[mod_file]]]]]]
|
keyword[def] identifier[check_mod_enabled] ( identifier[mod] ):
literal[string]
keyword[if] identifier[mod] . identifier[endswith] ( literal[string] ) keyword[or] identifier[mod] . identifier[endswith] ( literal[string] ):
identifier[mod_file] = identifier[mod]
keyword[else] :
identifier[mod_file] = literal[string] . identifier[format] ( identifier[mod] )
keyword[return] identifier[os] . identifier[path] . identifier[islink] ( literal[string] . identifier[format] ( identifier[mod_file] ))
|
def check_mod_enabled(mod):
"""
Checks to see if the specific mod symlink is in /etc/apache2/mods-enabled.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.check_mod_enabled status
salt '*' apache.check_mod_enabled status.load
salt '*' apache.check_mod_enabled status.conf
"""
if mod.endswith('.load') or mod.endswith('.conf'):
mod_file = mod # depends on [control=['if'], data=[]]
else:
mod_file = '{0}.load'.format(mod)
return os.path.islink('/etc/apache2/mods-enabled/{0}'.format(mod_file))
|
def get_dummies(
data,
prefix=None,
prefix_sep="_",
dummy_na=False,
columns=None,
sparse=False,
drop_first=False,
dtype=None,
):
"""Convert categorical variable into indicator variables.
Args:
data (array-like, Series, or DataFrame): data to encode.
prefix (string, [string]): Prefix to apply to each encoded column
label.
prefix_sep (string, [string]): Separator between prefix and value.
dummy_na (bool): Add a column to indicate NaNs.
columns: Which columns to encode.
        sparse (bool): Not implemented; raises NotImplementedError if True.
drop_first (bool): Whether to remove the first level of encoded data.
dtype: The dtype for the get_dummies call.
Returns:
DataFrame or one-hot encoded data.
"""
if sparse:
raise NotImplementedError(
"SparseDataFrame is not implemented. "
"To contribute to Modin, please visit "
"github.com/modin-project/modin."
)
if not isinstance(data, DataFrame):
ErrorMessage.default_to_pandas("`get_dummies` on non-DataFrame")
return DataFrame(
pandas.get_dummies(
data,
prefix=prefix,
prefix_sep=prefix_sep,
dummy_na=dummy_na,
columns=columns,
sparse=sparse,
drop_first=drop_first,
dtype=dtype,
)
)
else:
new_manager = data._query_compiler.get_dummies(
columns,
prefix=prefix,
prefix_sep=prefix_sep,
dummy_na=dummy_na,
drop_first=drop_first,
dtype=dtype,
)
return DataFrame(query_compiler=new_manager)
|
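A small one-hot encoding run against the Modin wrapper above; the frame contents are illustrative only:

import modin.pandas as pd

df = pd.DataFrame({'color': ['red', 'green', 'red'], 'size': [1, 2, 3]})
# Encodes only 'color'; drop_first removes one level to avoid collinearity.
encoded = pd.get_dummies(df, columns=['color'], prefix='color', drop_first=True)
print(encoded)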
def function[get_dummies, parameter[data, prefix, prefix_sep, dummy_na, columns, sparse, drop_first, dtype]]:
constant[Convert categorical variable into indicator variables.
Args:
data (array-like, Series, or DataFrame): data to encode.
prefix (string, [string]): Prefix to apply to each encoded column
label.
prefix_sep (string, [string]): Separator between prefix and value.
dummy_na (bool): Add a column to indicate NaNs.
columns: Which columns to encode.
        sparse (bool): Not implemented; raises NotImplementedError if True.
drop_first (bool): Whether to remove the first level of encoded data.
dtype: The dtype for the get_dummies call.
Returns:
DataFrame or one-hot encoded data.
]
if name[sparse] begin[:]
<ast.Raise object at 0x7da20c6e73a0>
if <ast.UnaryOp object at 0x7da20c6e6290> begin[:]
call[name[ErrorMessage].default_to_pandas, parameter[constant[`get_dummies` on non-DataFrame]]]
return[call[name[DataFrame], parameter[call[name[pandas].get_dummies, parameter[name[data]]]]]]
|
keyword[def] identifier[get_dummies] (
identifier[data] ,
identifier[prefix] = keyword[None] ,
identifier[prefix_sep] = literal[string] ,
identifier[dummy_na] = keyword[False] ,
identifier[columns] = keyword[None] ,
identifier[sparse] = keyword[False] ,
identifier[drop_first] = keyword[False] ,
identifier[dtype] = keyword[None] ,
):
literal[string]
keyword[if] identifier[sparse] :
keyword[raise] identifier[NotImplementedError] (
literal[string]
literal[string]
literal[string]
)
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[DataFrame] ):
identifier[ErrorMessage] . identifier[default_to_pandas] ( literal[string] )
keyword[return] identifier[DataFrame] (
identifier[pandas] . identifier[get_dummies] (
identifier[data] ,
identifier[prefix] = identifier[prefix] ,
identifier[prefix_sep] = identifier[prefix_sep] ,
identifier[dummy_na] = identifier[dummy_na] ,
identifier[columns] = identifier[columns] ,
identifier[sparse] = identifier[sparse] ,
identifier[drop_first] = identifier[drop_first] ,
identifier[dtype] = identifier[dtype] ,
)
)
keyword[else] :
identifier[new_manager] = identifier[data] . identifier[_query_compiler] . identifier[get_dummies] (
identifier[columns] ,
identifier[prefix] = identifier[prefix] ,
identifier[prefix_sep] = identifier[prefix_sep] ,
identifier[dummy_na] = identifier[dummy_na] ,
identifier[drop_first] = identifier[drop_first] ,
identifier[dtype] = identifier[dtype] ,
)
keyword[return] identifier[DataFrame] ( identifier[query_compiler] = identifier[new_manager] )
|
def get_dummies(data, prefix=None, prefix_sep='_', dummy_na=False, columns=None, sparse=False, drop_first=False, dtype=None):
"""Convert categorical variable into indicator variables.
Args:
data (array-like, Series, or DataFrame): data to encode.
prefix (string, [string]): Prefix to apply to each encoded column
label.
prefix_sep (string, [string]): Separator between prefix and value.
dummy_na (bool): Add a column to indicate NaNs.
columns: Which columns to encode.
        sparse (bool): Not implemented; raises NotImplementedError if True.
drop_first (bool): Whether to remove the first level of encoded data.
dtype: The dtype for the get_dummies call.
Returns:
DataFrame or one-hot encoded data.
"""
if sparse:
raise NotImplementedError('SparseDataFrame is not implemented. To contribute to Modin, please visit github.com/modin-project/modin.') # depends on [control=['if'], data=[]]
if not isinstance(data, DataFrame):
ErrorMessage.default_to_pandas('`get_dummies` on non-DataFrame')
return DataFrame(pandas.get_dummies(data, prefix=prefix, prefix_sep=prefix_sep, dummy_na=dummy_na, columns=columns, sparse=sparse, drop_first=drop_first, dtype=dtype)) # depends on [control=['if'], data=[]]
else:
new_manager = data._query_compiler.get_dummies(columns, prefix=prefix, prefix_sep=prefix_sep, dummy_na=dummy_na, drop_first=drop_first, dtype=dtype)
return DataFrame(query_compiler=new_manager)
|
def render_tree(tree, list_all=True, show_only=None, frozen=False, exclude=None):
"""Convert tree to string representation
:param dict tree: the package tree
    :param bool list_all: whether to list all the pkgs at the root
level or only those that are the
sub-dependencies
:param set show_only: set of select packages to be shown in the
output. This is optional arg, default: None.
    :param bool frozen: whether or not to show the names of the pkgs in
                        the output in a form favourable to pip --freeze
:param set exclude: set of select packages to be excluded from the
output. This is optional arg, default: None.
:returns: string representation of the tree
:rtype: str
"""
tree = sorted_tree(tree)
branch_keys = set(r.key for r in flatten(tree.values()))
nodes = tree.keys()
use_bullets = not frozen
key_tree = dict((k.key, v) for k, v in tree.items())
get_children = lambda n: key_tree.get(n.key, [])
if show_only:
nodes = [p for p in nodes
if p.key in show_only or p.project_name in show_only]
elif not list_all:
nodes = [p for p in nodes if p.key not in branch_keys]
def aux(node, parent=None, indent=0, chain=None):
if exclude and (node.key in exclude or node.project_name in exclude):
return []
if chain is None:
chain = [node.project_name]
node_str = node.render(parent, frozen)
if parent:
prefix = ' '*indent + ('- ' if use_bullets else '')
node_str = prefix + node_str
result = [node_str]
children = [aux(c, node, indent=indent+2,
chain=chain+[c.project_name])
for c in get_children(node)
if c.project_name not in chain]
result += list(flatten(children))
return result
lines = flatten([aux(p) for p in nodes])
return '\n'.join(lines)
|
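A self-contained toy run of render_tree with duck-typed nodes exposing the attributes it relies on (.key, .project_name, .render()); this assumes sorted_tree and flatten from the same module accept such nodes:

class Node(object):
    def __init__(self, name):
        self.key = name.lower()
        self.project_name = name

    def render(self, parent=None, frozen=False):
        return self.project_name

a, b, c = Node('A'), Node('B'), Node('C')
tree = {a: [b], b: [c], c: []}
# With list_all=False only true roots print; B and C appear nested under A.
print(render_tree(tree, list_all=False))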
def function[render_tree, parameter[tree, list_all, show_only, frozen, exclude]]:
constant[Convert tree to string representation
:param dict tree: the package tree
    :param bool list_all: whether to list all the pkgs at the root
level or only those that are the
sub-dependencies
:param set show_only: set of select packages to be shown in the
output. This is optional arg, default: None.
    :param bool frozen: whether or not to show the names of the pkgs in
                        the output in a form favourable to pip --freeze
:param set exclude: set of select packages to be excluded from the
output. This is optional arg, default: None.
:returns: string representation of the tree
:rtype: str
]
variable[tree] assign[=] call[name[sorted_tree], parameter[name[tree]]]
variable[branch_keys] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1e03e80>]]
variable[nodes] assign[=] call[name[tree].keys, parameter[]]
variable[use_bullets] assign[=] <ast.UnaryOp object at 0x7da1b1e03610>
variable[key_tree] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b1e03580>]]
variable[get_children] assign[=] <ast.Lambda object at 0x7da1b1e03a60>
if name[show_only] begin[:]
variable[nodes] assign[=] <ast.ListComp object at 0x7da1b1e03250>
def function[aux, parameter[node, parent, indent, chain]]:
if <ast.BoolOp object at 0x7da1b1e01420> begin[:]
return[list[[]]]
if compare[name[chain] is constant[None]] begin[:]
variable[chain] assign[=] list[[<ast.Attribute object at 0x7da1b1e01780>]]
variable[node_str] assign[=] call[name[node].render, parameter[name[parent], name[frozen]]]
if name[parent] begin[:]
variable[prefix] assign[=] binary_operation[binary_operation[constant[ ] * name[indent]] + <ast.IfExp object at 0x7da1b1e01ba0>]
variable[node_str] assign[=] binary_operation[name[prefix] + name[node_str]]
variable[result] assign[=] list[[<ast.Name object at 0x7da1b1e01e10>]]
variable[children] assign[=] <ast.ListComp object at 0x7da1b1e01ed0>
<ast.AugAssign object at 0x7da1b1e00d30>
return[name[result]]
variable[lines] assign[=] call[name[flatten], parameter[<ast.ListComp object at 0x7da1b1e02140>]]
return[call[constant[
].join, parameter[name[lines]]]]
|
keyword[def] identifier[render_tree] ( identifier[tree] , identifier[list_all] = keyword[True] , identifier[show_only] = keyword[None] , identifier[frozen] = keyword[False] , identifier[exclude] = keyword[None] ):
literal[string]
identifier[tree] = identifier[sorted_tree] ( identifier[tree] )
identifier[branch_keys] = identifier[set] ( identifier[r] . identifier[key] keyword[for] identifier[r] keyword[in] identifier[flatten] ( identifier[tree] . identifier[values] ()))
identifier[nodes] = identifier[tree] . identifier[keys] ()
identifier[use_bullets] = keyword[not] identifier[frozen]
identifier[key_tree] = identifier[dict] (( identifier[k] . identifier[key] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[tree] . identifier[items] ())
identifier[get_children] = keyword[lambda] identifier[n] : identifier[key_tree] . identifier[get] ( identifier[n] . identifier[key] ,[])
keyword[if] identifier[show_only] :
identifier[nodes] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[nodes]
keyword[if] identifier[p] . identifier[key] keyword[in] identifier[show_only] keyword[or] identifier[p] . identifier[project_name] keyword[in] identifier[show_only] ]
keyword[elif] keyword[not] identifier[list_all] :
identifier[nodes] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[nodes] keyword[if] identifier[p] . identifier[key] keyword[not] keyword[in] identifier[branch_keys] ]
keyword[def] identifier[aux] ( identifier[node] , identifier[parent] = keyword[None] , identifier[indent] = literal[int] , identifier[chain] = keyword[None] ):
keyword[if] identifier[exclude] keyword[and] ( identifier[node] . identifier[key] keyword[in] identifier[exclude] keyword[or] identifier[node] . identifier[project_name] keyword[in] identifier[exclude] ):
keyword[return] []
keyword[if] identifier[chain] keyword[is] keyword[None] :
identifier[chain] =[ identifier[node] . identifier[project_name] ]
identifier[node_str] = identifier[node] . identifier[render] ( identifier[parent] , identifier[frozen] )
keyword[if] identifier[parent] :
identifier[prefix] = literal[string] * identifier[indent] +( literal[string] keyword[if] identifier[use_bullets] keyword[else] literal[string] )
identifier[node_str] = identifier[prefix] + identifier[node_str]
identifier[result] =[ identifier[node_str] ]
identifier[children] =[ identifier[aux] ( identifier[c] , identifier[node] , identifier[indent] = identifier[indent] + literal[int] ,
identifier[chain] = identifier[chain] +[ identifier[c] . identifier[project_name] ])
keyword[for] identifier[c] keyword[in] identifier[get_children] ( identifier[node] )
keyword[if] identifier[c] . identifier[project_name] keyword[not] keyword[in] identifier[chain] ]
identifier[result] += identifier[list] ( identifier[flatten] ( identifier[children] ))
keyword[return] identifier[result]
identifier[lines] = identifier[flatten] ([ identifier[aux] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[nodes] ])
keyword[return] literal[string] . identifier[join] ( identifier[lines] )
|
def render_tree(tree, list_all=True, show_only=None, frozen=False, exclude=None):
"""Convert tree to string representation
:param dict tree: the package tree
    :param bool list_all: whether to list all the pkgs at the root
level or only those that are the
sub-dependencies
:param set show_only: set of select packages to be shown in the
output. This is optional arg, default: None.
    :param bool frozen: whether or not to show the names of the pkgs in
                        the output in a form favourable to pip --freeze
:param set exclude: set of select packages to be excluded from the
output. This is optional arg, default: None.
:returns: string representation of the tree
:rtype: str
"""
tree = sorted_tree(tree)
branch_keys = set((r.key for r in flatten(tree.values())))
nodes = tree.keys()
use_bullets = not frozen
key_tree = dict(((k.key, v) for (k, v) in tree.items()))
get_children = lambda n: key_tree.get(n.key, [])
if show_only:
nodes = [p for p in nodes if p.key in show_only or p.project_name in show_only] # depends on [control=['if'], data=[]]
elif not list_all:
nodes = [p for p in nodes if p.key not in branch_keys] # depends on [control=['if'], data=[]]
def aux(node, parent=None, indent=0, chain=None):
if exclude and (node.key in exclude or node.project_name in exclude):
return [] # depends on [control=['if'], data=[]]
if chain is None:
chain = [node.project_name] # depends on [control=['if'], data=['chain']]
node_str = node.render(parent, frozen)
if parent:
prefix = ' ' * indent + ('- ' if use_bullets else '')
node_str = prefix + node_str # depends on [control=['if'], data=[]]
result = [node_str]
children = [aux(c, node, indent=indent + 2, chain=chain + [c.project_name]) for c in get_children(node) if c.project_name not in chain]
result += list(flatten(children))
return result
lines = flatten([aux(p) for p in nodes])
return '\n'.join(lines)
|
def edit_file(self, file_name):
"""Edit file in place, returns a list of modifications (unified diff).
Arguments:
file_name (str, unicode): The name of the file.
"""
with io.open(file_name, "r", encoding=self.encoding) as from_file:
try:
from_lines = from_file.readlines()
except UnicodeDecodeError as err:
log.error("encoding error (see --encoding): %s", err)
raise
if self._executables:
nb_execs = len(self._executables)
if nb_execs > 1:
log.warn("found %d executables. Will use first one", nb_execs)
exec_list = self._executables[0].split()
exec_list.append(file_name)
try:
log.info("running %s...", " ".join(exec_list))
output = subprocess.check_output(exec_list,
universal_newlines=True)
except Exception as err:
log.error("failed to execute %s: %s", " ".join(exec_list), err)
raise # Let the exception be handled at a higher level.
to_lines = output.split(unicode("\n"))
else:
to_lines = from_lines
# unified_diff wants structure of known length. Convert to a list.
to_lines = list(self.edit_content(to_lines, file_name))
diffs = difflib.unified_diff(from_lines, to_lines,
fromfile=file_name, tofile='<new>')
if not self.dry_run:
bak_file_name = file_name + ".bak"
if os.path.exists(bak_file_name):
msg = "{} already exists".format(bak_file_name)
if sys.version_info < (3, 3):
raise OSError(msg)
else:
# noinspection PyCompatibility
# pylint: disable=undefined-variable
raise FileExistsError(msg)
try:
os.rename(file_name, bak_file_name)
with io.open(file_name, 'w', encoding=self.encoding, newline=self.newline) as new:
new.writelines(to_lines)
# Keeps mode of original file.
shutil.copymode(bak_file_name, file_name)
except Exception as err:
log.error("failed to write output to %s: %s", file_name, err)
# Try to recover...
try:
os.rename(bak_file_name, file_name)
except OSError as err:
log.error("failed to restore %s from %s: %s",
file_name, bak_file_name, err)
raise
try:
os.unlink(bak_file_name)
except OSError as err:
log.warning("failed to remove backup %s: %s",
bak_file_name, err)
return list(diffs)
|
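A hedged driver for edit_file: the surrounding class (called Editor here) and its constructor are assumptions; only the method contract, unified-diff lines returned and dry_run honoured, comes from the code above:

import sys

editor = Editor(encoding='utf-8')  # hypothetical owner of edit_file
editor.dry_run = True              # preview: compute the diff, rewrite nothing
for line in editor.edit_file('notes.txt'):
    sys.stdout.write(line)         # diff lines already carry newlines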
def function[edit_file, parameter[self, file_name]]:
constant[Edit file in place, returns a list of modifications (unified diff).
Arguments:
file_name (str, unicode): The name of the file.
]
with call[name[io].open, parameter[name[file_name], constant[r]]] begin[:]
<ast.Try object at 0x7da1b0ef0cd0>
if name[self]._executables begin[:]
variable[nb_execs] assign[=] call[name[len], parameter[name[self]._executables]]
if compare[name[nb_execs] greater[>] constant[1]] begin[:]
call[name[log].warn, parameter[constant[found %d executables. Will use first one], name[nb_execs]]]
variable[exec_list] assign[=] call[call[name[self]._executables][constant[0]].split, parameter[]]
call[name[exec_list].append, parameter[name[file_name]]]
<ast.Try object at 0x7da1b0ef1150>
variable[to_lines] assign[=] call[name[output].split, parameter[call[name[unicode], parameter[constant[
]]]]]
variable[to_lines] assign[=] call[name[list], parameter[call[name[self].edit_content, parameter[name[to_lines], name[file_name]]]]]
variable[diffs] assign[=] call[name[difflib].unified_diff, parameter[name[from_lines], name[to_lines]]]
if <ast.UnaryOp object at 0x7da1b0d8d960> begin[:]
variable[bak_file_name] assign[=] binary_operation[name[file_name] + constant[.bak]]
if call[name[os].path.exists, parameter[name[bak_file_name]]] begin[:]
variable[msg] assign[=] call[constant[{} already exists].format, parameter[name[bak_file_name]]]
if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b0d8cac0>, <ast.Constant object at 0x7da1b0d8d6f0>]]] begin[:]
<ast.Raise object at 0x7da1b0d8dde0>
<ast.Try object at 0x7da1b0d8ddb0>
<ast.Try object at 0x7da1b0da29b0>
return[call[name[list], parameter[name[diffs]]]]
|
keyword[def] identifier[edit_file] ( identifier[self] , identifier[file_name] ):
literal[string]
keyword[with] identifier[io] . identifier[open] ( identifier[file_name] , literal[string] , identifier[encoding] = identifier[self] . identifier[encoding] ) keyword[as] identifier[from_file] :
keyword[try] :
identifier[from_lines] = identifier[from_file] . identifier[readlines] ()
keyword[except] identifier[UnicodeDecodeError] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( literal[string] , identifier[err] )
keyword[raise]
keyword[if] identifier[self] . identifier[_executables] :
identifier[nb_execs] = identifier[len] ( identifier[self] . identifier[_executables] )
keyword[if] identifier[nb_execs] > literal[int] :
identifier[log] . identifier[warn] ( literal[string] , identifier[nb_execs] )
identifier[exec_list] = identifier[self] . identifier[_executables] [ literal[int] ]. identifier[split] ()
identifier[exec_list] . identifier[append] ( identifier[file_name] )
keyword[try] :
identifier[log] . identifier[info] ( literal[string] , literal[string] . identifier[join] ( identifier[exec_list] ))
identifier[output] = identifier[subprocess] . identifier[check_output] ( identifier[exec_list] ,
identifier[universal_newlines] = keyword[True] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( literal[string] , literal[string] . identifier[join] ( identifier[exec_list] ), identifier[err] )
keyword[raise]
identifier[to_lines] = identifier[output] . identifier[split] ( identifier[unicode] ( literal[string] ))
keyword[else] :
identifier[to_lines] = identifier[from_lines]
identifier[to_lines] = identifier[list] ( identifier[self] . identifier[edit_content] ( identifier[to_lines] , identifier[file_name] ))
identifier[diffs] = identifier[difflib] . identifier[unified_diff] ( identifier[from_lines] , identifier[to_lines] ,
identifier[fromfile] = identifier[file_name] , identifier[tofile] = literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[dry_run] :
identifier[bak_file_name] = identifier[file_name] + literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[bak_file_name] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[bak_file_name] )
keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ):
keyword[raise] identifier[OSError] ( identifier[msg] )
keyword[else] :
keyword[raise] identifier[FileExistsError] ( identifier[msg] )
keyword[try] :
identifier[os] . identifier[rename] ( identifier[file_name] , identifier[bak_file_name] )
keyword[with] identifier[io] . identifier[open] ( identifier[file_name] , literal[string] , identifier[encoding] = identifier[self] . identifier[encoding] , identifier[newline] = identifier[self] . identifier[newline] ) keyword[as] identifier[new] :
identifier[new] . identifier[writelines] ( identifier[to_lines] )
identifier[shutil] . identifier[copymode] ( identifier[bak_file_name] , identifier[file_name] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( literal[string] , identifier[file_name] , identifier[err] )
keyword[try] :
identifier[os] . identifier[rename] ( identifier[bak_file_name] , identifier[file_name] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( literal[string] ,
identifier[file_name] , identifier[bak_file_name] , identifier[err] )
keyword[raise]
keyword[try] :
identifier[os] . identifier[unlink] ( identifier[bak_file_name] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
identifier[log] . identifier[warning] ( literal[string] ,
identifier[bak_file_name] , identifier[err] )
keyword[return] identifier[list] ( identifier[diffs] )
|
def edit_file(self, file_name):
"""Edit file in place, returns a list of modifications (unified diff).
Arguments:
file_name (str, unicode): The name of the file.
"""
with io.open(file_name, 'r', encoding=self.encoding) as from_file:
try:
from_lines = from_file.readlines() # depends on [control=['try'], data=[]]
except UnicodeDecodeError as err:
log.error('encoding error (see --encoding): %s', err)
raise # depends on [control=['except'], data=['err']] # depends on [control=['with'], data=['from_file']]
if self._executables:
nb_execs = len(self._executables)
if nb_execs > 1:
log.warn('found %d executables. Will use first one', nb_execs) # depends on [control=['if'], data=['nb_execs']]
exec_list = self._executables[0].split()
exec_list.append(file_name)
try:
log.info('running %s...', ' '.join(exec_list))
output = subprocess.check_output(exec_list, universal_newlines=True) # depends on [control=['try'], data=[]]
except Exception as err:
log.error('failed to execute %s: %s', ' '.join(exec_list), err)
raise # Let the exception be handled at a higher level. # depends on [control=['except'], data=['err']]
to_lines = output.split(unicode('\n')) # depends on [control=['if'], data=[]]
else:
to_lines = from_lines
# unified_diff wants structure of known length. Convert to a list.
to_lines = list(self.edit_content(to_lines, file_name))
diffs = difflib.unified_diff(from_lines, to_lines, fromfile=file_name, tofile='<new>')
if not self.dry_run:
bak_file_name = file_name + '.bak'
if os.path.exists(bak_file_name):
msg = '{} already exists'.format(bak_file_name)
if sys.version_info < (3, 3):
raise OSError(msg) # depends on [control=['if'], data=[]]
else:
# noinspection PyCompatibility
# pylint: disable=undefined-variable
raise FileExistsError(msg) # depends on [control=['if'], data=[]]
try:
os.rename(file_name, bak_file_name)
with io.open(file_name, 'w', encoding=self.encoding, newline=self.newline) as new:
new.writelines(to_lines) # depends on [control=['with'], data=['new']]
# Keeps mode of original file.
shutil.copymode(bak_file_name, file_name) # depends on [control=['try'], data=[]]
except Exception as err:
log.error('failed to write output to %s: %s', file_name, err)
# Try to recover...
try:
os.rename(bak_file_name, file_name) # depends on [control=['try'], data=[]]
except OSError as err:
log.error('failed to restore %s from %s: %s', file_name, bak_file_name, err) # depends on [control=['except'], data=['err']]
raise # depends on [control=['except'], data=['err']]
try:
os.unlink(bak_file_name) # depends on [control=['try'], data=[]]
except OSError as err:
log.warning('failed to remove backup %s: %s', bak_file_name, err) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]]
return list(diffs)
|
def _traverse_tree(tree, path):
"""Traverses the permission tree, returning the permission at given permission path."""
path_steps = (step for step in path.split('.') if step != '')
# Special handling for first step, because the first step isn't under 'objects'
first_step = path_steps.next()
subtree = tree[first_step]
for step in path_steps:
subtree = subtree['children'][step]
return subtree
|
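A toy permission tree matching the shape the helper walks: the first path step is a top-level key, and every later step lives under 'children':

tree = {
    'app': {
        'children': {
            'reports': {'children': {'export': {'children': {}}}},
        },
    },
}
# Resolves the dotted path step by step; empty steps ('app..reports') are skipped.
print(_traverse_tree(tree, 'app.reports.export'))  # -> {'children': {}}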
def function[_traverse_tree, parameter[tree, path]]:
constant[Traverses the permission tree, returning the permission at given permission path.]
variable[path_steps] assign[=] <ast.GeneratorExp object at 0x7da20c6aba90>
variable[first_step] assign[=] call[name[path_steps].next, parameter[]]
variable[subtree] assign[=] call[name[tree]][name[first_step]]
for taget[name[step]] in starred[name[path_steps]] begin[:]
variable[subtree] assign[=] call[call[name[subtree]][constant[children]]][name[step]]
return[name[subtree]]
|
keyword[def] identifier[_traverse_tree] ( identifier[tree] , identifier[path] ):
literal[string]
identifier[path_steps] =( identifier[step] keyword[for] identifier[step] keyword[in] identifier[path] . identifier[split] ( literal[string] ) keyword[if] identifier[step] != literal[string] )
identifier[first_step] = identifier[path_steps] . identifier[next] ()
identifier[subtree] = identifier[tree] [ identifier[first_step] ]
keyword[for] identifier[step] keyword[in] identifier[path_steps] :
identifier[subtree] = identifier[subtree] [ literal[string] ][ identifier[step] ]
keyword[return] identifier[subtree]
|
def _traverse_tree(tree, path):
"""Traverses the permission tree, returning the permission at given permission path."""
path_steps = (step for step in path.split('.') if step != '')
# Special handling for first step, because the first step isn't under 'objects'
first_step = path_steps.next()
subtree = tree[first_step]
for step in path_steps:
subtree = subtree['children'][step] # depends on [control=['for'], data=['step']]
return subtree
|
def initialize(self):
"""See :meth:`pymlab.sensors.Device.initialize` for more information.
Calls `initialize()` on all devices connected to the bus.
"""
Device.initialize(self)
for child in iter(self.children.values()):
child.initialize()
|
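A hedged pymlab sketch showing why the cascade matters: one initialize() call on the config prepares the bus and every attached sensor. The port and sensor parameters below are illustrative only:

from pymlab import config

cfg = config.Config(
    i2c={'port': 1},                         # assumed adapter settings
    bus=[{'name': 'sht', 'type': 'sht25'}],  # one example sensor
)
cfg.initialize()                # cascades Device.initialize() down the tree
sensor = cfg.get_device('sht')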
def function[initialize, parameter[self]]:
constant[See :meth:`pymlab.sensors.Device.initialize` for more information.
Calls `initialize()` on all devices connected to the bus.
]
call[name[Device].initialize, parameter[name[self]]]
for taget[name[child]] in starred[call[name[iter], parameter[call[name[self].children.values, parameter[]]]]] begin[:]
call[name[child].initialize, parameter[]]
|
keyword[def] identifier[initialize] ( identifier[self] ):
literal[string]
identifier[Device] . identifier[initialize] ( identifier[self] )
keyword[for] identifier[child] keyword[in] identifier[iter] ( identifier[self] . identifier[children] . identifier[values] ()):
identifier[child] . identifier[initialize] ()
|
def initialize(self):
"""See :meth:`pymlab.sensors.Device.initialize` for more information.
Calls `initialize()` on all devices connected to the bus.
"""
Device.initialize(self)
for child in iter(self.children.values()):
child.initialize() # depends on [control=['for'], data=['child']]
|
def ansi_sgr(text, fg=None, bg=None, style=None, reset=True, **sgr):
"""
Apply desired SGR commands to given text.
:param text:
Text or anything convertible to text
:param fg:
(optional) Foreground color. Choose one of
``black``, ``red``, ``green``, ``yellow``, ``blue``, ``magenta``
``cyan`` or ``white``. Note that the ``bright`` *SGR* impacts
effective color in most implementations.
"""
# Ensure that text is really a string
text = type("")(text)
# NOTE: SGR stands for "set graphics rendition"
sgr_list = [] # List of SGR codes
# Load SGR code associated with desired foreground color
if isinstance(fg, (str, type(""))):
try:
sgr_code = getattr(ANSI, str('sgr_fg_{}'.format(fg)))
except AttributeError:
raise ValueError("incorrect foreground color: {!r}".format(fg))
else:
sgr_list.append(sgr_code)
elif isinstance(fg, tuple):
sgr_code = ANSI.sgr_fg_rgb(*fg)
sgr_list.append(sgr_code)
elif isinstance(fg, int):
if fg < -8:
assert fg in range(-16, -8), fg
# -16 to -9: bright colors
sgr_code = ANSI.sgr_fg_bright(-fg - 8 - 1)
elif fg < 0:
            # -8 to -1: classic colors
# Negative numbers represent the classic colors
assert fg in range(-8, 0), fg
sgr_code = ANSI.sgr_fg_classic(-fg - 1)
else:
assert fg in range(256)
sgr_code = ANSI.sgr_fg_indexed(fg)
sgr_list.append(sgr_code)
elif fg is None:
pass
else:
raise ValueError("incorrect foreground color: {!r}".format(fg))
# Load SGR code associated with desired background color
if isinstance(bg, (str, type(""))):
try:
sgr_code = getattr(ANSI, str('sgr_bg_{}'.format(bg)))
except AttributeError:
raise ValueError("incorrect background color: {!r}".format(bg))
else:
sgr_list.append(sgr_code)
elif isinstance(bg, tuple):
sgr_code = ANSI.sgr_bg_rgb(*bg)
sgr_list.append(sgr_code)
elif isinstance(bg, int):
if bg < -8:
assert bg in range(-16, -8), bg
# -16 to -9: bright colors
sgr_code = ANSI.sgr_bg_bright(-bg - 8 - 1)
elif bg < 0:
            # -8 to -1: classic colors
            # Negative numbers represent the classic colors
            assert bg in range(-8, 0), bg
sgr_code = ANSI.sgr_bg_classic(-bg - 1)
else:
assert bg in range(256)
sgr_code = ANSI.sgr_bg_indexed(bg)
sgr_list.append(sgr_code)
elif bg is None:
pass
else:
raise ValueError("incorrect background color: {!r}".format(bg))
# Load single SGR code for "style"
if style is not None:
try:
sgr_code = getattr(ANSI, str('sgr_{}'.format(style)))
except AttributeError:
raise ValueError("incorrect text style: {!r}".format(style))
else:
sgr_list.append(sgr_code)
# Load additional SGR codes (custom)
for name, active in sgr.items():
try:
sgr_code = getattr(ANSI, str('sgr_{}'.format(name)))
except AttributeError:
raise ValueError("incorrect custom SGR code: {!r}".format(name))
else:
if active:
sgr_list.append(sgr_code)
# Combine everything into one sequence
if reset:
return ANSI.cmd_sgr(sgr_list) + text + ANSI.cmd_sgr_reset_all
else:
return ANSI.cmd_sgr(sgr_list) + text
|
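A quick demo of the colour forms the helper accepts (names, 0-255 indexes, RGB tuples, negative classic codes); the 'bold' style assumes an ANSI.sgr_bold attribute exists:

print(ansi_sgr('error', fg='red', style='bold'))   # named colour + style
print(ansi_sgr('warn', fg=214))                    # 256-colour palette index
print(ansi_sgr('ok', fg=(0, 200, 0), bg='black'))  # 24-bit RGB on named black
print(ansi_sgr('dim', fg=-3))                      # negative: classic colour 2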
def function[ansi_sgr, parameter[text, fg, bg, style, reset]]:
constant[
Apply desired SGR commands to given text.
:param text:
Text or anything convertible to text
:param fg:
(optional) Foreground color. Choose one of
``black``, ``red``, ``green``, ``yellow``, ``blue``, ``magenta``
``cyan`` or ``white``. Note that the ``bright`` *SGR* impacts
effective color in most implementations.
]
variable[text] assign[=] call[call[name[type], parameter[constant[]]], parameter[name[text]]]
variable[sgr_list] assign[=] list[[]]
if call[name[isinstance], parameter[name[fg], tuple[[<ast.Name object at 0x7da20c7cbdf0>, <ast.Call object at 0x7da20c7cab00>]]]] begin[:]
<ast.Try object at 0x7da20c7c9570>
if call[name[isinstance], parameter[name[bg], tuple[[<ast.Name object at 0x7da18f00e3b0>, <ast.Call object at 0x7da18f00f970>]]]] begin[:]
<ast.Try object at 0x7da18f00c0a0>
if compare[name[style] is_not constant[None]] begin[:]
<ast.Try object at 0x7da18dc04a00>
for taget[tuple[[<ast.Name object at 0x7da18dc05750>, <ast.Name object at 0x7da18dc05600>]]] in starred[call[name[sgr].items, parameter[]]] begin[:]
<ast.Try object at 0x7da18dc07e50>
if name[reset] begin[:]
return[binary_operation[binary_operation[call[name[ANSI].cmd_sgr, parameter[name[sgr_list]]] + name[text]] + name[ANSI].cmd_sgr_reset_all]]
|
keyword[def] identifier[ansi_sgr] ( identifier[text] , identifier[fg] = keyword[None] , identifier[bg] = keyword[None] , identifier[style] = keyword[None] , identifier[reset] = keyword[True] ,** identifier[sgr] ):
literal[string]
identifier[text] = identifier[type] ( literal[string] )( identifier[text] )
identifier[sgr_list] =[]
keyword[if] identifier[isinstance] ( identifier[fg] ,( identifier[str] , identifier[type] ( literal[string] ))):
keyword[try] :
identifier[sgr_code] = identifier[getattr] ( identifier[ANSI] , identifier[str] ( literal[string] . identifier[format] ( identifier[fg] )))
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[fg] ))
keyword[else] :
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[elif] identifier[isinstance] ( identifier[fg] , identifier[tuple] ):
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_fg_rgb] (* identifier[fg] )
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[elif] identifier[isinstance] ( identifier[fg] , identifier[int] ):
keyword[if] identifier[fg] <- literal[int] :
keyword[assert] identifier[fg] keyword[in] identifier[range] (- literal[int] ,- literal[int] ), identifier[fg]
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_fg_bright] (- identifier[fg] - literal[int] - literal[int] )
keyword[elif] identifier[fg] < literal[int] :
keyword[assert] identifier[fg] keyword[in] identifier[range] (- literal[int] , literal[int] ), identifier[fg]
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_fg_classic] (- identifier[fg] - literal[int] )
keyword[else] :
keyword[assert] identifier[fg] keyword[in] identifier[range] ( literal[int] )
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_fg_indexed] ( identifier[fg] )
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[elif] identifier[fg] keyword[is] keyword[None] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[fg] ))
keyword[if] identifier[isinstance] ( identifier[bg] ,( identifier[str] , identifier[type] ( literal[string] ))):
keyword[try] :
identifier[sgr_code] = identifier[getattr] ( identifier[ANSI] , identifier[str] ( literal[string] . identifier[format] ( identifier[bg] )))
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[bg] ))
keyword[else] :
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[elif] identifier[isinstance] ( identifier[bg] , identifier[tuple] ):
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_bg_rgb] (* identifier[bg] )
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[elif] identifier[isinstance] ( identifier[bg] , identifier[int] ):
keyword[if] identifier[bg] <- literal[int] :
keyword[assert] identifier[bg] keyword[in] identifier[range] (- literal[int] ,- literal[int] ), identifier[bg]
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_bg_bright] (- identifier[bg] - literal[int] - literal[int] )
keyword[elif] identifier[bg] < literal[int] :
            keyword[assert] identifier[bg] keyword[in] identifier[range] (- literal[int] , literal[int] ), identifier[bg]
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_bg_classic] (- identifier[bg] - literal[int] )
keyword[else] :
keyword[assert] identifier[bg] keyword[in] identifier[range] ( literal[int] )
identifier[sgr_code] = identifier[ANSI] . identifier[sgr_bg_indexed] ( identifier[bg] )
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[elif] identifier[bg] keyword[is] keyword[None] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[bg] ))
keyword[if] identifier[style] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[sgr_code] = identifier[getattr] ( identifier[ANSI] , identifier[str] ( literal[string] . identifier[format] ( identifier[style] )))
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[style] ))
keyword[else] :
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[for] identifier[name] , identifier[active] keyword[in] identifier[sgr] . identifier[items] ():
keyword[try] :
identifier[sgr_code] = identifier[getattr] ( identifier[ANSI] , identifier[str] ( literal[string] . identifier[format] ( identifier[name] )))
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[else] :
keyword[if] identifier[active] :
identifier[sgr_list] . identifier[append] ( identifier[sgr_code] )
keyword[if] identifier[reset] :
keyword[return] identifier[ANSI] . identifier[cmd_sgr] ( identifier[sgr_list] )+ identifier[text] + identifier[ANSI] . identifier[cmd_sgr_reset_all]
keyword[else] :
keyword[return] identifier[ANSI] . identifier[cmd_sgr] ( identifier[sgr_list] )+ identifier[text]
|
def ansi_sgr(text, fg=None, bg=None, style=None, reset=True, **sgr):
"""
Apply desired SGR commands to given text.
:param text:
Text or anything convertible to text
:param fg:
(optional) Foreground color. Choose one of
``black``, ``red``, ``green``, ``yellow``, ``blue``, ``magenta``
``cyan`` or ``white``. Note that the ``bright`` *SGR* impacts
effective color in most implementations.
"""
# Ensure that text is really a string
text = type('')(text)
# NOTE: SGR stands for "set graphics rendition"
sgr_list = [] # List of SGR codes
# Load SGR code associated with desired foreground color
if isinstance(fg, (str, type(''))):
try:
sgr_code = getattr(ANSI, str('sgr_fg_{}'.format(fg))) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('incorrect foreground color: {!r}'.format(fg)) # depends on [control=['except'], data=[]]
else:
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]]
elif isinstance(fg, tuple):
sgr_code = ANSI.sgr_fg_rgb(*fg)
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]]
elif isinstance(fg, int):
if fg < -8:
assert fg in range(-16, -8), fg
# -16 to -9: bright colors
sgr_code = ANSI.sgr_fg_bright(-fg - 8 - 1) # depends on [control=['if'], data=['fg']]
elif fg < 0:
            # -8 to -1: classic colors
# Negative numbers represent the classic colors
assert fg in range(-8, 0), fg
sgr_code = ANSI.sgr_fg_classic(-fg - 1) # depends on [control=['if'], data=['fg']]
else:
assert fg in range(256)
sgr_code = ANSI.sgr_fg_indexed(fg)
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]]
elif fg is None:
pass # depends on [control=['if'], data=[]]
else:
raise ValueError('incorrect foreground color: {!r}'.format(fg))
# Load SGR code associated with desired background color
if isinstance(bg, (str, type(''))):
try:
sgr_code = getattr(ANSI, str('sgr_bg_{}'.format(bg))) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('incorrect background color: {!r}'.format(bg)) # depends on [control=['except'], data=[]]
else:
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]]
elif isinstance(bg, tuple):
sgr_code = ANSI.sgr_bg_rgb(*bg)
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]]
elif isinstance(bg, int):
if bg < -8:
assert bg in range(-16, -8), bg
# -16 to -9: bright colors
sgr_code = ANSI.sgr_bg_bright(-bg - 8 - 1) # depends on [control=['if'], data=['bg']]
elif bg < 0:
            # -8 to -1: classic colors
            # Negative numbers represent the classic colors
            assert bg in range(-8, 0), bg
sgr_code = ANSI.sgr_bg_classic(-bg - 1) # depends on [control=['if'], data=['bg']]
else:
assert bg in range(256)
sgr_code = ANSI.sgr_bg_indexed(bg)
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]]
elif bg is None:
pass # depends on [control=['if'], data=[]]
else:
raise ValueError('incorrect background color: {!r}'.format(bg))
# Load single SGR code for "style"
if style is not None:
try:
sgr_code = getattr(ANSI, str('sgr_{}'.format(style))) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('incorrect text style: {!r}'.format(style)) # depends on [control=['except'], data=[]]
else:
sgr_list.append(sgr_code) # depends on [control=['if'], data=['style']]
# Load additional SGR codes (custom)
for (name, active) in sgr.items():
try:
sgr_code = getattr(ANSI, str('sgr_{}'.format(name))) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('incorrect custom SGR code: {!r}'.format(name)) # depends on [control=['except'], data=[]]
else:
if active:
sgr_list.append(sgr_code) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Combine everything into one sequence
if reset:
return ANSI.cmd_sgr(sgr_list) + text + ANSI.cmd_sgr_reset_all # depends on [control=['if'], data=[]]
else:
return ANSI.cmd_sgr(sgr_list) + text
|
def on_setup_ssh(self, b):
"""ATTENTION: modifying the order of operations in this function can lead to unexpected problems"""
with self._setup_ssh_out:
clear_output()
self._ssh_keygen()
            # temporary passwords
password = self.__password
proxy_password = self.__proxy_password
# step 1: if hostname is not provided - do not do anything
if self.hostname is None: # check hostname
print("Please specify the computer hostname")
return
# step 2: check if password-free access was enabled earlier
if self.can_login():
print ("Password-free access is already enabled")
# it can still happen that password-free access is enabled
# but host is not present in the config file - fixing this
if not self.is_in_config():
                self._write_ssh_config() # we do not use a proxy here, because if the computer
                                          # can be accessed without any info in the config, a proxy is not needed.
self.setup_counter += 1 # only if config file has changed - increase setup_counter
return
            # step 3: if we can't log in already, check whether all required information is provided
if self.username is None: # check username
print("Please enter your ssh username")
return
if len(password.strip()) == 0: # check password
print("Please enter your ssh password")
return
# step 4: get the right commands to access the proxy server (if provided)
success, proxycmd = self._configure_proxy(password, proxy_password)
if not success:
return
# step 5: make host known by ssh on the proxy server
if not self.is_host_known():
self._make_host_known(self.hostname,['ssh']+[proxycmd] if proxycmd else [])
# step 6: sending public key to the main host
if not self._send_pubkey(self.hostname, self.username, password, proxycmd):
print ("Could not send public key to {}".format(self.hostname))
return
# step 7: modify the ssh config file if necessary
if not self.is_in_config():
self._write_ssh_config(proxycmd=proxycmd)
# TODO: add a check if new config is different from the current one. If so
            # inform the user about it.
# step 8: final check
if self.can_login():
self.setup_counter += 1
print("Automatic ssh setup successful :-)")
return
else:
print("Automatic ssh setup failed, sorry :-(")
return
|
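A wiring sketch for the callback above: in ipywidgets it would typically be attached to a Button, whose instance arrives as the b argument; the SshComputerSetup class name is an assumption:

import ipywidgets as widgets
from IPython.display import display

setup = SshComputerSetup()             # hypothetical owner of on_setup_ssh
button = widgets.Button(description='Setup ssh')
button.on_click(setup.on_setup_ssh)    # the button itself is passed as b
display(button, setup._setup_ssh_out)  # output area the callback writes into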
def function[on_setup_ssh, parameter[self, b]]:
constant[ATTENTION: modifying the order of operations in this function can lead to unexpected problems]
with name[self]._setup_ssh_out begin[:]
call[name[clear_output], parameter[]]
call[name[self]._ssh_keygen, parameter[]]
variable[password] assign[=] name[self].__password
variable[proxy_password] assign[=] name[self].__proxy_password
if compare[name[self].hostname is constant[None]] begin[:]
call[name[print], parameter[constant[Please specify the computer hostname]]]
return[None]
if call[name[self].can_login, parameter[]] begin[:]
call[name[print], parameter[constant[Password-free access is already enabled]]]
if <ast.UnaryOp object at 0x7da1b1a76560> begin[:]
call[name[self]._write_ssh_config, parameter[]]
<ast.AugAssign object at 0x7da1b1a76740>
return[None]
if compare[name[self].username is constant[None]] begin[:]
call[name[print], parameter[constant[Please enter your ssh username]]]
return[None]
if compare[call[name[len], parameter[call[name[password].strip, parameter[]]]] equal[==] constant[0]] begin[:]
call[name[print], parameter[constant[Please enter your ssh password]]]
return[None]
<ast.Tuple object at 0x7da18f58f1f0> assign[=] call[name[self]._configure_proxy, parameter[name[password], name[proxy_password]]]
if <ast.UnaryOp object at 0x7da18f58f730> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da18f58fc70> begin[:]
call[name[self]._make_host_known, parameter[name[self].hostname, <ast.IfExp object at 0x7da18f58db70>]]
if <ast.UnaryOp object at 0x7da18f58e6e0> begin[:]
call[name[print], parameter[call[constant[Could not send public key to {}].format, parameter[name[self].hostname]]]]
return[None]
if <ast.UnaryOp object at 0x7da18f58c6d0> begin[:]
call[name[self]._write_ssh_config, parameter[]]
if call[name[self].can_login, parameter[]] begin[:]
<ast.AugAssign object at 0x7da18f58d030>
call[name[print], parameter[constant[Automatic ssh setup successful :-)]]]
return[None]
|
keyword[def] identifier[on_setup_ssh] ( identifier[self] , identifier[b] ):
literal[string]
keyword[with] identifier[self] . identifier[_setup_ssh_out] :
identifier[clear_output] ()
identifier[self] . identifier[_ssh_keygen] ()
identifier[password] = identifier[self] . identifier[__password]
identifier[proxy_password] = identifier[self] . identifier[__proxy_password]
keyword[if] identifier[self] . identifier[hostname] keyword[is] keyword[None] :
identifier[print] ( literal[string] )
keyword[return]
keyword[if] identifier[self] . identifier[can_login] ():
identifier[print] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[is_in_config] ():
identifier[self] . identifier[_write_ssh_config] ()
identifier[self] . identifier[setup_counter] += literal[int]
keyword[return]
keyword[if] identifier[self] . identifier[username] keyword[is] keyword[None] :
identifier[print] ( literal[string] )
keyword[return]
keyword[if] identifier[len] ( identifier[password] . identifier[strip] ())== literal[int] :
identifier[print] ( literal[string] )
keyword[return]
identifier[success] , identifier[proxycmd] = identifier[self] . identifier[_configure_proxy] ( identifier[password] , identifier[proxy_password] )
keyword[if] keyword[not] identifier[success] :
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[is_host_known] ():
identifier[self] . identifier[_make_host_known] ( identifier[self] . identifier[hostname] ,[ literal[string] ]+[ identifier[proxycmd] ] keyword[if] identifier[proxycmd] keyword[else] [])
keyword[if] keyword[not] identifier[self] . identifier[_send_pubkey] ( identifier[self] . identifier[hostname] , identifier[self] . identifier[username] , identifier[password] , identifier[proxycmd] ):
identifier[print] ( literal[string] . identifier[format] ( identifier[self] . identifier[hostname] ))
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[is_in_config] ():
identifier[self] . identifier[_write_ssh_config] ( identifier[proxycmd] = identifier[proxycmd] )
keyword[if] identifier[self] . identifier[can_login] ():
identifier[self] . identifier[setup_counter] += literal[int]
identifier[print] ( literal[string] )
keyword[return]
keyword[else] :
identifier[print] ( literal[string] )
keyword[return]
|
def on_setup_ssh(self, b):
"""ATTENTION: modifying the order of operations in this function can lead to unexpected problems"""
with self._setup_ssh_out:
clear_output()
self._ssh_keygen()
            # temporary passwords
password = self.__password
proxy_password = self.__proxy_password
# step 1: if hostname is not provided - do not do anything
if self.hostname is None: # check hostname
print('Please specify the computer hostname')
return # depends on [control=['if'], data=[]]
# step 2: check if password-free access was enabled earlier
if self.can_login():
print('Password-free access is already enabled')
# it can still happen that password-free access is enabled
# but host is not present in the config file - fixing this
if not self.is_in_config():
                self._write_ssh_config() # we do not use a proxy here, because if the computer
                # can be accessed without any info in the config, a proxy is not needed.
self.setup_counter += 1 # only if config file has changed - increase setup_counter # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
            # step 3: if we can't log in already, check whether all required information is provided
if self.username is None: # check username
print('Please enter your ssh username')
return # depends on [control=['if'], data=[]]
if len(password.strip()) == 0: # check password
print('Please enter your ssh password')
return # depends on [control=['if'], data=[]]
# step 4: get the right commands to access the proxy server (if provided)
(success, proxycmd) = self._configure_proxy(password, proxy_password)
if not success:
return # depends on [control=['if'], data=[]]
# step 5: make host known by ssh on the proxy server
if not self.is_host_known():
self._make_host_known(self.hostname, ['ssh'] + [proxycmd] if proxycmd else []) # depends on [control=['if'], data=[]]
# step 6: sending public key to the main host
if not self._send_pubkey(self.hostname, self.username, password, proxycmd):
print('Could not send public key to {}'.format(self.hostname))
return # depends on [control=['if'], data=[]]
# step 7: modify the ssh config file if necessary
if not self.is_in_config():
self._write_ssh_config(proxycmd=proxycmd) # depends on [control=['if'], data=[]]
# TODO: add a check if new config is different from the current one. If so
            # inform the user about it.
# step 8: final check
if self.can_login():
self.setup_counter += 1
print('Automatic ssh setup successful :-)')
return # depends on [control=['if'], data=[]]
else:
print('Automatic ssh setup failed, sorry :-(')
return # depends on [control=['with'], data=[]]
|
def calculate_partition_movement(prev_assignment, curr_assignment):
"""Calculate the partition movements from initial to current assignment.
Algorithm:
For each partition in initial assignment
# If replica set different in current assignment:
# Get Difference in sets
    :rtype: tuple
    (dict mapping partition to (from_broker_set, to_broker_set), total_movements)
"""
total_movements = 0
movements = {}
for prev_partition, prev_replicas in six.iteritems(prev_assignment):
curr_replicas = curr_assignment[prev_partition]
diff = len(set(curr_replicas) - set(prev_replicas))
if diff:
total_movements += diff
movements[prev_partition] = (
(set(prev_replicas) - set(curr_replicas)),
(set(curr_replicas) - set(prev_replicas)),
)
return movements, total_movements
|
def function[calculate_partition_movement, parameter[prev_assignment, curr_assignment]]:
constant[Calculate the partition movements from initial to current assignment.
Algorithm:
For each partition in initial assignment
# If replica set different in current assignment:
# Get Difference in sets
    :rtype: tuple
    (dict mapping partition to (from_broker_set, to_broker_set), total_movements)
]
variable[total_movements] assign[=] constant[0]
variable[movements] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b07ac190>, <ast.Name object at 0x7da1b07ae200>]]] in starred[call[name[six].iteritems, parameter[name[prev_assignment]]]] begin[:]
variable[curr_replicas] assign[=] call[name[curr_assignment]][name[prev_partition]]
variable[diff] assign[=] call[name[len], parameter[binary_operation[call[name[set], parameter[name[curr_replicas]]] - call[name[set], parameter[name[prev_replicas]]]]]]
if name[diff] begin[:]
<ast.AugAssign object at 0x7da1b07aefe0>
call[name[movements]][name[prev_partition]] assign[=] tuple[[<ast.BinOp object at 0x7da1b07ad9f0>, <ast.BinOp object at 0x7da1b07ac8b0>]]
return[tuple[[<ast.Name object at 0x7da1b07ac040>, <ast.Name object at 0x7da1b07ad660>]]]
|
keyword[def] identifier[calculate_partition_movement] ( identifier[prev_assignment] , identifier[curr_assignment] ):
literal[string]
identifier[total_movements] = literal[int]
identifier[movements] ={}
keyword[for] identifier[prev_partition] , identifier[prev_replicas] keyword[in] identifier[six] . identifier[iteritems] ( identifier[prev_assignment] ):
identifier[curr_replicas] = identifier[curr_assignment] [ identifier[prev_partition] ]
identifier[diff] = identifier[len] ( identifier[set] ( identifier[curr_replicas] )- identifier[set] ( identifier[prev_replicas] ))
keyword[if] identifier[diff] :
identifier[total_movements] += identifier[diff]
identifier[movements] [ identifier[prev_partition] ]=(
( identifier[set] ( identifier[prev_replicas] )- identifier[set] ( identifier[curr_replicas] )),
( identifier[set] ( identifier[curr_replicas] )- identifier[set] ( identifier[prev_replicas] )),
)
keyword[return] identifier[movements] , identifier[total_movements]
|
def calculate_partition_movement(prev_assignment, curr_assignment):
"""Calculate the partition movements from initial to current assignment.
Algorithm:
For each partition in initial assignment
# If replica set different in current assignment:
# Get Difference in sets
    :rtype: tuple
    (dict mapping partition to (from_broker_set, to_broker_set), total_movements)
"""
total_movements = 0
movements = {}
for (prev_partition, prev_replicas) in six.iteritems(prev_assignment):
curr_replicas = curr_assignment[prev_partition]
diff = len(set(curr_replicas) - set(prev_replicas))
if diff:
total_movements += diff
movements[prev_partition] = (set(prev_replicas) - set(curr_replicas), set(curr_replicas) - set(prev_replicas)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (movements, total_movements)
|
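A quick illustration of the set-difference logic above, with made-up broker assignments; it exercises only the core arithmetic, so it runs standalone without six:

prev_replicas = [1, 2, 3]
curr_replicas = [1, 2, 5]
# Replicas that left and joined this partition's replica set.
moved_out = set(prev_replicas) - set(curr_replicas)
moved_in = set(curr_replicas) - set(prev_replicas)
assert (moved_out, moved_in) == ({3}, {5})
assert len(moved_in) == 1  # counts as one partition movement
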
def types(self):
"""
List of the known event types
"""
r = requests.get(self.evaluator_url + 'types')
r.raise_for_status()
return r.json()
|
def function[types, parameter[self]]:
constant[
List of the known event types
]
variable[r] assign[=] call[name[requests].get, parameter[binary_operation[name[self].evaluator_url + constant[types]]]]
call[name[r].raise_for_status, parameter[]]
return[call[name[r].json, parameter[]]]
|
keyword[def] identifier[types] ( identifier[self] ):
literal[string]
identifier[r] = identifier[requests] . identifier[get] ( identifier[self] . identifier[evaluator_url] + literal[string] )
identifier[r] . identifier[raise_for_status] ()
keyword[return] identifier[r] . identifier[json] ()
|
def types(self):
"""
List of the known event types
"""
r = requests.get(self.evaluator_url + 'types')
r.raise_for_status()
return r.json()
|
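For reference, a standalone sketch of the same call outside the class; the evaluator URL is a placeholder, not a real endpoint, and requests must be installed:

import requests

def fetch_event_types(evaluator_url):
    # GET <evaluator_url>types, fail loudly on non-2xx, decode JSON.
    r = requests.get(evaluator_url + 'types')
    r.raise_for_status()
    return r.json()

# event_types = fetch_event_types('http://localhost:8080/')  # hypothetical
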
def update_style(self, mapping):
"""Use to update fill-color"""
default = {
"presentation:background-visible": "true",
"presentation:background-objects-visible": "true",
"draw:fill": "solid",
"draw:fill-color": "#772953",
"draw:fill-image-width": "0cm",
"draw:fill-image-height": "0cm",
"presentation:display-footer": "true",
"presentation:display-page-number": "false",
"presentation:display-date-time": "true",
}
default.update(mapping)
style = PageStyle(**default)
node = style.style_node()
# add style to automatic-style
self.preso._auto_styles.append(node)
# update page style-name
# found in ._page
self._page.set(ns("draw", "style-name"), node.attrib[ns("style", "name")])
|
def function[update_style, parameter[self, mapping]]:
constant[Use to update fill-color]
variable[default] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c8c10>, <ast.Constant object at 0x7da20c7c9840>, <ast.Constant object at 0x7da20c7ca440>, <ast.Constant object at 0x7da20c7c96c0>, <ast.Constant object at 0x7da20c7c83a0>, <ast.Constant object at 0x7da20c7c9a80>, <ast.Constant object at 0x7da20c7cab60>, <ast.Constant object at 0x7da20c7c91b0>, <ast.Constant object at 0x7da20c7c9030>], [<ast.Constant object at 0x7da20c7c84c0>, <ast.Constant object at 0x7da20c7c9060>, <ast.Constant object at 0x7da20c7c9090>, <ast.Constant object at 0x7da20c7caa70>, <ast.Constant object at 0x7da20c7caaa0>, <ast.Constant object at 0x7da20c7ca9e0>, <ast.Constant object at 0x7da20c7ca140>, <ast.Constant object at 0x7da20c7cada0>, <ast.Constant object at 0x7da20c7cbdf0>]]
call[name[default].update, parameter[name[mapping]]]
variable[style] assign[=] call[name[PageStyle], parameter[]]
variable[node] assign[=] call[name[style].style_node, parameter[]]
call[name[self].preso._auto_styles.append, parameter[name[node]]]
call[name[self]._page.set, parameter[call[name[ns], parameter[constant[draw], constant[style-name]]], call[name[node].attrib][call[name[ns], parameter[constant[style], constant[name]]]]]]
|
keyword[def] identifier[update_style] ( identifier[self] , identifier[mapping] ):
literal[string]
identifier[default] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[default] . identifier[update] ( identifier[mapping] )
identifier[style] = identifier[PageStyle] (** identifier[default] )
identifier[node] = identifier[style] . identifier[style_node] ()
identifier[self] . identifier[preso] . identifier[_auto_styles] . identifier[append] ( identifier[node] )
identifier[self] . identifier[_page] . identifier[set] ( identifier[ns] ( literal[string] , literal[string] ), identifier[node] . identifier[attrib] [ identifier[ns] ( literal[string] , literal[string] )])
|
def update_style(self, mapping):
"""Use to update fill-color"""
default = {'presentation:background-visible': 'true', 'presentation:background-objects-visible': 'true', 'draw:fill': 'solid', 'draw:fill-color': '#772953', 'draw:fill-image-width': '0cm', 'draw:fill-image-height': '0cm', 'presentation:display-footer': 'true', 'presentation:display-page-number': 'false', 'presentation:display-date-time': 'true'}
default.update(mapping)
style = PageStyle(**default)
node = style.style_node()
# add style to automatic-style
self.preso._auto_styles.append(node)
# update page style-name
# found in ._page
self._page.set(ns('draw', 'style-name'), node.attrib[ns('style', 'name')])
|
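The defaults-then-override merge used above is worth isolating; a minimal sketch with plain dicts, no odfpy types involved:

defaults = {'draw:fill': 'solid', 'draw:fill-color': '#772953'}
overrides = {'draw:fill-color': '#336699'}

merged = dict(defaults)   # copy, so the shared defaults stay intact
merged.update(overrides)  # caller-supplied keys win
assert merged == {'draw:fill': 'solid', 'draw:fill-color': '#336699'}
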
def make_serializable(data):
"""Ensure data is serializable."""
if is_serializable(data):
return data
# if numpy array convert to list
try:
return data.tolist()
except AttributeError:
pass
except Exception as e:
logger.debug('{} exception ({}): {}'.format(type(e).__name__, e, data))
# try serializing each child element
if isinstance(data, dict):
return {key: make_serializable(value) for key, value in data.items()}
try:
return [make_serializable(element) for element in data]
except TypeError: # not iterable
pass
except Exception:
logger.debug('Could not serialize {}; converting to string'.format(data))
# last resort: convert to string
return str(data)
|
def function[make_serializable, parameter[data]]:
constant[Ensure data is serializable.]
if call[name[is_serializable], parameter[name[data]]] begin[:]
return[name[data]]
<ast.Try object at 0x7da1b040a620>
if call[name[isinstance], parameter[name[data], name[dict]]] begin[:]
return[<ast.DictComp object at 0x7da1b040b4f0>]
<ast.Try object at 0x7da1b04093c0>
return[call[name[str], parameter[name[data]]]]
|
keyword[def] identifier[make_serializable] ( identifier[data] ):
literal[string]
keyword[if] identifier[is_serializable] ( identifier[data] ):
keyword[return] identifier[data]
keyword[try] :
keyword[return] identifier[data] . identifier[tolist] ()
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[type] ( identifier[e] ). identifier[__name__] , identifier[e] , identifier[data] ))
keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ):
keyword[return] { identifier[key] : identifier[make_serializable] ( identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data] . identifier[items] ()}
keyword[try] :
keyword[return] [ identifier[make_serializable] ( identifier[element] ) keyword[for] identifier[element] keyword[in] identifier[data] ]
keyword[except] identifier[TypeError] :
keyword[pass]
keyword[except] identifier[Exception] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[data] ))
keyword[return] identifier[str] ( identifier[data] )
|
def make_serializable(data):
"""Ensure data is serializable."""
if is_serializable(data):
return data # depends on [control=['if'], data=[]]
# if numpy array convert to list
try:
return data.tolist() # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
except Exception as e:
logger.debug('{} exception ({}): {}'.format(type(e).__name__, e, data)) # depends on [control=['except'], data=['e']]
# try serializing each child element
if isinstance(data, dict):
return {key: make_serializable(value) for (key, value) in data.items()} # depends on [control=['if'], data=[]]
try:
return [make_serializable(element) for element in data] # depends on [control=['try'], data=[]]
except TypeError: # not iterable
pass # depends on [control=['except'], data=[]]
except Exception:
logger.debug('Could not serialize {}; converting to string'.format(data)) # depends on [control=['except'], data=[]]
# last resort: convert to string
return str(data)
|
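A small demonstration of the fallback chain; the Point class is invented for illustration, and the final call (shown in a comment) assumes make_serializable and its is_serializable helper are in scope:

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y
    def __repr__(self):
        return 'Point(%r, %r)' % (self.x, self.y)

data = {'pts': [Point(1, 2)], 'n': 3}
# dict and list recurse, while Point has no tolist() and is not iterable,
# so it falls through to str():
# make_serializable(data) -> {'pts': ['Point(1, 2)'], 'n': 3}
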
def confd_state_snmp_version_v2c(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring")
snmp = ET.SubElement(confd_state, "snmp")
version = ET.SubElement(snmp, "version")
v2c = ET.SubElement(version, "v2c")
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def function[confd_state_snmp_version_v2c, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[confd_state] assign[=] call[name[ET].SubElement, parameter[name[config], constant[confd-state]]]
variable[snmp] assign[=] call[name[ET].SubElement, parameter[name[confd_state], constant[snmp]]]
variable[version] assign[=] call[name[ET].SubElement, parameter[name[snmp], constant[version]]]
variable[v2c] assign[=] call[name[ET].SubElement, parameter[name[version], constant[v2c]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[confd_state_snmp_version_v2c] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[confd_state] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[snmp] = identifier[ET] . identifier[SubElement] ( identifier[confd_state] , literal[string] )
identifier[version] = identifier[ET] . identifier[SubElement] ( identifier[snmp] , literal[string] )
identifier[v2c] = identifier[ET] . identifier[SubElement] ( identifier[version] , literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def confd_state_snmp_version_v2c(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
confd_state = ET.SubElement(config, 'confd-state', xmlns='http://tail-f.com/yang/confd-monitoring')
snmp = ET.SubElement(confd_state, 'snmp')
version = ET.SubElement(snmp, 'version')
v2c = ET.SubElement(version, 'v2c')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
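Stripped of the RPC callback, the XML being built can be inspected directly; a runnable sketch using only the standard library:

import xml.etree.ElementTree as ET

config = ET.Element('config')
confd_state = ET.SubElement(config, 'confd-state',
                            xmlns='http://tail-f.com/yang/confd-monitoring')
snmp = ET.SubElement(confd_state, 'snmp')
version = ET.SubElement(snmp, 'version')
ET.SubElement(version, 'v2c')
# Keyword arguments become plain attributes, so xmlns is emitted literally.
print(ET.tostring(config).decode())
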
def png_as_base64_str(self, scale=1, module_color=(0, 0, 0, 255),
background=(255, 255, 255, 255), quiet_zone=4):
"""This method uses the png render and returns the PNG image encoded as
base64 string. This can be useful for creating dynamic PNG images for
web development, since no file needs to be created.
Example:
>>> code = pyqrcode.create('Are you suggesting coconuts migrate?')
>>> image_as_str = code.png_as_base64_str(scale=5)
>>> html_img = '<img src="data:image/png;base64,{}">'.format(image_as_str)
The parameters are passed directly to the :py:meth:`png` method. Refer
to that method's documentation for the meaning behind the parameters.
.. note::
This method depends on the pypng module to actually create the
PNG image.
"""
import io
import base64
with io.BytesIO() as virtual_file:
self.png(file=virtual_file, scale=scale, module_color=module_color,
background=background, quiet_zone=quiet_zone)
image_as_str = base64.b64encode(virtual_file.getvalue()).decode("ascii")
return image_as_str
|
def function[png_as_base64_str, parameter[self, scale, module_color, background, quiet_zone]]:
constant[This method uses the png render and returns the PNG image encoded as
base64 string. This can be useful for creating dynamic PNG images for
web development, since no file needs to be created.
Example:
>>> code = pyqrcode.create('Are you suggesting coconuts migrate?')
>>> image_as_str = code.png_as_base64_str(scale=5)
>>> html_img = '<img src="data:image/png;base64,{}">'.format(image_as_str)
The parameters are passed directly to the :py:meth:`png` method. Refer
to that method's documentation for the meaning behind the parameters.
.. note::
This method depends on the pypng module to actually create the
PNG image.
]
import module[io]
import module[base64]
with call[name[io].BytesIO, parameter[]] begin[:]
call[name[self].png, parameter[]]
variable[image_as_str] assign[=] call[call[name[base64].b64encode, parameter[call[name[virtual_file].getvalue, parameter[]]]].decode, parameter[constant[ascii]]]
return[name[image_as_str]]
|
keyword[def] identifier[png_as_base64_str] ( identifier[self] , identifier[scale] = literal[int] , identifier[module_color] =( literal[int] , literal[int] , literal[int] , literal[int] ),
identifier[background] =( literal[int] , literal[int] , literal[int] , literal[int] ), identifier[quiet_zone] = literal[int] ):
literal[string]
keyword[import] identifier[io]
keyword[import] identifier[base64]
keyword[with] identifier[io] . identifier[BytesIO] () keyword[as] identifier[virtual_file] :
identifier[self] . identifier[png] ( identifier[file] = identifier[virtual_file] , identifier[scale] = identifier[scale] , identifier[module_color] = identifier[module_color] ,
identifier[background] = identifier[background] , identifier[quiet_zone] = identifier[quiet_zone] )
identifier[image_as_str] = identifier[base64] . identifier[b64encode] ( identifier[virtual_file] . identifier[getvalue] ()). identifier[decode] ( literal[string] )
keyword[return] identifier[image_as_str]
|
def png_as_base64_str(self, scale=1, module_color=(0, 0, 0, 255), background=(255, 255, 255, 255), quiet_zone=4):
"""This method uses the png render and returns the PNG image encoded as
base64 string. This can be useful for creating dynamic PNG images for
web development, since no file needs to be created.
Example:
>>> code = pyqrcode.create('Are you suggesting coconuts migrate?')
>>> image_as_str = code.png_as_base64_str(scale=5)
>>> html_img = '<img src="data:image/png;base64,{}">'.format(image_as_str)
The parameters are passed directly to the :py:meth:`png` method. Refer
to that method's documentation for the meaning behind the parameters.
.. note::
This method depends on the pypng module to actually create the
PNG image.
"""
import io
import base64
with io.BytesIO() as virtual_file:
self.png(file=virtual_file, scale=scale, module_color=module_color, background=background, quiet_zone=quiet_zone)
image_as_str = base64.b64encode(virtual_file.getvalue()).decode('ascii') # depends on [control=['with'], data=['virtual_file']]
return image_as_str
|
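The BytesIO-plus-base64 pattern generalizes to any writer that accepts a file object; a dependency-free sketch (the writer here is a stand-in, not pyqrcode):

import io
import base64

def as_base64_str(write_fn):
    # write_fn: any callable that writes binary data into a file object.
    with io.BytesIO() as buf:
        write_fn(buf)
        return base64.b64encode(buf.getvalue()).decode('ascii')

encoded = as_base64_str(lambda f: f.write(b'\x89PNG\r\n'))
html_img = '<img src="data:image/png;base64,{}">'.format(encoded)
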
def Task(func, *args, **kwargs):
"""Adapts a callback-based asynchronous function for use in coroutines.
Takes a function (and optional additional arguments) and runs it with
those arguments plus a ``callback`` keyword argument. The argument passed
to the callback is returned as the result of the yield expression.
.. versionchanged:: 4.0
``gen.Task`` is now a function that returns a `.Future`, instead of
a subclass of `YieldPoint`. It still behaves the same way when
yielded.
"""
future = Future()
def handle_exception(typ, value, tb):
if future.done():
return False
future.set_exc_info((typ, value, tb))
return True
def set_result(result):
if future.done():
return
future.set_result(result)
with stack_context.ExceptionStackContext(handle_exception):
func(*args, callback=_argument_adapter(set_result), **kwargs)
return future
|
def function[Task, parameter[func]]:
constant[Adapts a callback-based asynchronous function for use in coroutines.
Takes a function (and optional additional arguments) and runs it with
those arguments plus a ``callback`` keyword argument. The argument passed
to the callback is returned as the result of the yield expression.
.. versionchanged:: 4.0
``gen.Task`` is now a function that returns a `.Future`, instead of
a subclass of `YieldPoint`. It still behaves the same way when
yielded.
]
variable[future] assign[=] call[name[Future], parameter[]]
def function[handle_exception, parameter[typ, value, tb]]:
if call[name[future].done, parameter[]] begin[:]
return[constant[False]]
call[name[future].set_exc_info, parameter[tuple[[<ast.Name object at 0x7da18bc70370>, <ast.Name object at 0x7da1b1bee740>, <ast.Name object at 0x7da1b1bef670>]]]]
return[constant[True]]
def function[set_result, parameter[result]]:
if call[name[future].done, parameter[]] begin[:]
return[None]
call[name[future].set_result, parameter[name[result]]]
with call[name[stack_context].ExceptionStackContext, parameter[name[handle_exception]]] begin[:]
call[name[func], parameter[<ast.Starred object at 0x7da1b1bab550>]]
return[name[future]]
|
keyword[def] identifier[Task] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[future] = identifier[Future] ()
keyword[def] identifier[handle_exception] ( identifier[typ] , identifier[value] , identifier[tb] ):
keyword[if] identifier[future] . identifier[done] ():
keyword[return] keyword[False]
identifier[future] . identifier[set_exc_info] (( identifier[typ] , identifier[value] , identifier[tb] ))
keyword[return] keyword[True]
keyword[def] identifier[set_result] ( identifier[result] ):
keyword[if] identifier[future] . identifier[done] ():
keyword[return]
identifier[future] . identifier[set_result] ( identifier[result] )
keyword[with] identifier[stack_context] . identifier[ExceptionStackContext] ( identifier[handle_exception] ):
identifier[func] (* identifier[args] , identifier[callback] = identifier[_argument_adapter] ( identifier[set_result] ),** identifier[kwargs] )
keyword[return] identifier[future]
|
def Task(func, *args, **kwargs):
"""Adapts a callback-based asynchronous function for use in coroutines.
Takes a function (and optional additional arguments) and runs it with
those arguments plus a ``callback`` keyword argument. The argument passed
to the callback is returned as the result of the yield expression.
.. versionchanged:: 4.0
``gen.Task`` is now a function that returns a `.Future`, instead of
a subclass of `YieldPoint`. It still behaves the same way when
yielded.
"""
future = Future()
def handle_exception(typ, value, tb):
if future.done():
return False # depends on [control=['if'], data=[]]
future.set_exc_info((typ, value, tb))
return True
def set_result(result):
if future.done():
return # depends on [control=['if'], data=[]]
future.set_result(result)
with stack_context.ExceptionStackContext(handle_exception):
func(*args, callback=_argument_adapter(set_result), **kwargs) # depends on [control=['with'], data=[]]
return future
|
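The same callback-to-future adaptation can be sketched with concurrent.futures.Future, minus Tornado's stack-context and exception plumbing; a simplification, not Tornado's actual code:

from concurrent.futures import Future

def task(func, *args, **kwargs):
    future = Future()
    def on_done(result):
        # Ignore late callbacks once the future is resolved.
        if not future.done():
            future.set_result(result)
    func(*args, callback=on_done, **kwargs)
    return future

def add_later(a, b, callback):
    callback(a + b)  # synchronous stand-in for an async operation

assert task(add_later, 1, 2).result() == 3
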
def unwrap_errors(path_replace):
# type: (Union[Text, Mapping[Text, Text]]) -> Iterator[None]
"""Get a context to map OS errors to their `fs.errors` counterpart.
The context will re-write the paths in resource exceptions to be
in the same context as the wrapped filesystem.
The only parameter may be the path from the parent, if only one path
is to be unwrapped. Or it may be a dictionary that maps wrapped
paths on to unwrapped paths.
"""
try:
yield
except errors.ResourceError as e:
if hasattr(e, "path"):
if isinstance(path_replace, Mapping):
e.path = path_replace.get(e.path, e.path)
else:
e.path = path_replace
reraise(type(e), e)
|
def function[unwrap_errors, parameter[path_replace]]:
constant[Get a context to map OS errors to their `fs.errors` counterpart.
The context will re-write the paths in resource exceptions to be
in the same context as the wrapped filesystem.
The only parameter may be the path from the parent, if only one path
is to be unwrapped. Or it may be a dictionary that maps wrapped
paths on to unwrapped paths.
]
<ast.Try object at 0x7da1b1686890>
|
keyword[def] identifier[unwrap_errors] ( identifier[path_replace] ):
literal[string]
keyword[try] :
keyword[yield]
keyword[except] identifier[errors] . identifier[ResourceError] keyword[as] identifier[e] :
keyword[if] identifier[hasattr] ( identifier[e] , literal[string] ):
keyword[if] identifier[isinstance] ( identifier[path_replace] , identifier[Mapping] ):
identifier[e] . identifier[path] = identifier[path_replace] . identifier[get] ( identifier[e] . identifier[path] , identifier[e] . identifier[path] )
keyword[else] :
identifier[e] . identifier[path] = identifier[path_replace]
identifier[reraise] ( identifier[type] ( identifier[e] ), identifier[e] )
|
def unwrap_errors(path_replace):
# type: (Union[Text, Mapping[Text, Text]]) -> Iterator[None]
'Get a context to map OS errors to their `fs.errors` counterpart.\n\n The context will re-write the paths in resource exceptions to be\n in the same context as the wrapped filesystem.\n\n The only parameter may be the path from the parent, if only one path\n is to be unwrapped. Or it may be a dictionary that maps wrapped\n paths on to unwrapped paths.\n\n '
try:
yield # depends on [control=['try'], data=[]]
except errors.ResourceError as e:
if hasattr(e, 'path'):
if isinstance(path_replace, Mapping):
e.path = path_replace.get(e.path, e.path) # depends on [control=['if'], data=[]]
else:
e.path = path_replace # depends on [control=['if'], data=[]]
reraise(type(e), e) # depends on [control=['except'], data=['e']]
|
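The yield-based body above only behaves as a context manager once wrapped (in the fs codebase it carries a contextlib.contextmanager decorator that this extract drops); a self-contained sketch of the same rewrite-and-reraise idea:

from contextlib import contextmanager

@contextmanager
def map_path_errors(path_replace):
    # Rewrite the path on any OSError raised inside the block, then re-raise.
    try:
        yield
    except OSError as e:
        e.filename = path_replace
        raise

# with map_path_errors('/outer/view/of/path'):
#     open('/inner/real/path')  # hypothetical failing operation
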
def rename(self, new_id):
"""
Renames the DatabaseObject to have ID_KEY new_id. This is the only
way allowed by DatabaseObject to change the ID_KEY of an object.
Trying to modify ID_KEY in the dictionary will raise an exception.
@param new_id: the new value for ID_KEY
NOTE: This is actually a create and delete.
WARNING: If the system fails during a rename, data may be duplicated.
"""
old_id = dict.__getitem__(self, ID_KEY)
dict.__setitem__(self, ID_KEY, new_id)
self._collection.save(self)
self._collection.remove({ID_KEY: old_id})
|
def function[rename, parameter[self, new_id]]:
constant[
Renames the DatabaseObject to have ID_KEY new_id. This is the only
way allowed by DatabaseObject to change the ID_KEY of an object.
Trying to modify ID_KEY in the dictionary will raise an exception.
@param new_id: the new value for ID_KEY
NOTE: This is actually a create and delete.
WARNING: If the system fails during a rename, data may be duplicated.
]
variable[old_id] assign[=] call[name[dict].__getitem__, parameter[name[self], name[ID_KEY]]]
call[name[dict].__setitem__, parameter[name[self], name[ID_KEY], name[new_id]]]
call[name[self]._collection.save, parameter[name[self]]]
call[name[self]._collection.remove, parameter[dictionary[[<ast.Name object at 0x7da1b0baab90>], [<ast.Name object at 0x7da1b0baa920>]]]]
|
keyword[def] identifier[rename] ( identifier[self] , identifier[new_id] ):
literal[string]
identifier[old_id] = identifier[dict] . identifier[__getitem__] ( identifier[self] , identifier[ID_KEY] )
identifier[dict] . identifier[__setitem__] ( identifier[self] , identifier[ID_KEY] , identifier[new_id] )
identifier[self] . identifier[_collection] . identifier[save] ( identifier[self] )
identifier[self] . identifier[_collection] . identifier[remove] ({ identifier[ID_KEY] : identifier[old_id] })
|
def rename(self, new_id):
"""
Renames the DatabaseObject to have ID_KEY new_id. This is the only
way allowed by DatabaseObject to change the ID_KEY of an object.
Trying to modify ID_KEY in the dictionary will raise an exception.
@param new_id: the new value for ID_KEY
NOTE: This is actually a create and delete.
WARNING: If the system fails during a rename, data may be duplicated.
"""
old_id = dict.__getitem__(self, ID_KEY)
dict.__setitem__(self, ID_KEY, new_id)
self._collection.save(self)
self._collection.remove({ID_KEY: old_id})
|
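The create-before-delete ordering is what turns a crash mid-rename into duplication rather than loss; the same idea over a plain dict standing in for the collection (a sketch, not MongoDB):

ID_KEY = '_id'

def rename_record(collection, record, new_id):
    old_id = record[ID_KEY]
    record[ID_KEY] = new_id
    collection[new_id] = record   # save the copy under the new id first...
    collection.pop(old_id, None)  # ...only then drop the old entry

db = {'a': {'_id': 'a', 'v': 1}}
rename_record(db, db['a'], 'b')
assert 'b' in db and 'a' not in db
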
def save_vtq():
"""Exit on Signal"""
global vtq
print('Saving VirusTotal Query Cache...')
pickle.dump(vtq, open('vtq.pkl', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
sys.exit()
|
def function[save_vtq, parameter[]]:
constant[Exit on Signal]
<ast.Global object at 0x7da18bccaa40>
call[name[print], parameter[constant[Saving VirusTotal Query Cache...]]]
call[name[pickle].dump, parameter[name[vtq], call[name[open], parameter[constant[vtq.pkl], constant[wb]]]]]
call[name[sys].exit, parameter[]]
|
keyword[def] identifier[save_vtq] ():
literal[string]
keyword[global] identifier[vtq]
identifier[print] ( literal[string] )
identifier[pickle] . identifier[dump] ( identifier[vtq] , identifier[open] ( literal[string] , literal[string] ), identifier[protocol] = identifier[pickle] . identifier[HIGHEST_PROTOCOL] )
identifier[sys] . identifier[exit] ()
|
def save_vtq():
"""Exit on Signal"""
global vtq
print('Saving VirusTotal Query Cache...')
pickle.dump(vtq, open('vtq.pkl', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
sys.exit()
|
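One nit in the function above: the file handle from open() is never explicitly closed. A with-block variant releases it deterministically (a sketch, same pickle call):

import pickle

def save_cache(obj, path='vtq.pkl'):
    with open(path, 'wb') as fh:
        pickle.dump(obj, fh, protocol=pickle.HIGHEST_PROTOCOL)

# save_cache(vtq), then sys.exit() as in the original handler
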
def _instant_search(self):
"""Determine possible keys after a push or pop
"""
_keys = []
for k,v in self.searchables.iteritems():
if self.string in v:
_keys.append(k)
self.candidates.append(_keys)
|
def function[_instant_search, parameter[self]]:
constant[Determine possible keys after a push or pop
]
variable[_keys] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18c4cf250>, <ast.Name object at 0x7da18c4cf2e0>]]] in starred[call[name[self].searchables.iteritems, parameter[]]] begin[:]
if compare[name[self].string in name[v]] begin[:]
call[name[_keys].append, parameter[name[k]]]
call[name[self].candidates.append, parameter[name[_keys]]]
|
keyword[def] identifier[_instant_search] ( identifier[self] ):
literal[string]
identifier[_keys] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[searchables] . identifier[iteritems] ():
keyword[if] identifier[self] . identifier[string] keyword[in] identifier[v] :
identifier[_keys] . identifier[append] ( identifier[k] )
identifier[self] . identifier[candidates] . identifier[append] ( identifier[_keys] )
|
def _instant_search(self):
"""Determine possible keys after a push or pop
"""
_keys = []
for (k, v) in self.searchables.iteritems():
if self.string in v:
_keys.append(k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.candidates.append(_keys)
|
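iteritems() is Python 2 only; the same search written for Python 3, reduced to a standalone function for clarity:

def instant_search(searchables, needle):
    # Collect every key whose searchable text contains the needle.
    return [k for k, v in searchables.items() if needle in v]

assert instant_search({'a': 'apple pie', 'b': 'banana'}, 'pie') == ['a']
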
def _create_config_file(out_dir, samples):
"""Provide configuration file hiding duplicate columns.
Future entry point for providing top level configuration of output reports.
"""
out_file = os.path.join(out_dir, "multiqc_config.yaml")
out = {"table_columns_visible": dict()}
# Avoid duplicated bcbio columns with qualimap
if any(("qualimap" in dd.get_tools_on(d) or "qualimap_full" in dd.get_tools_on(d)) for d in samples):
# Hiding metrics duplicated by Qualimap
out["table_columns_visible"]["bcbio"] = {"Average_insert_size": False}
out["table_columns_visible"]["FastQC"] = {"percent_gc": False}
# Setting up thresholds for Qualimap depth cutoff calculations, based on sample avg depths
avg_depths = [tz.get_in(["summary", "metrics", "Avg_coverage"], s) for s in samples]
avg_depths = [x for x in avg_depths if x]
# Picking all thresholds up to the highest sample average depth
thresholds = [t for t in coverage.DEPTH_THRESHOLDS if not avg_depths or t <= max(avg_depths)]
# ...plus one more
if len(thresholds) < len(coverage.DEPTH_THRESHOLDS):
thresholds.append(coverage.DEPTH_THRESHOLDS[len(thresholds)])
# Showing only thresholds surrounding any of average depths
thresholds_hidden = []
for i, t in enumerate(thresholds):
if t > 20: # Not hiding anything below 20x
if any(thresholds[i-1] <= c < thresholds[i] for c in avg_depths if c and i-1 >= 0) or \
any(thresholds[i] <= c < thresholds[i+1] for c in avg_depths if c and i+1 < len(thresholds)):
pass
else:
thresholds_hidden.append(t)
# Hide coverage unless running full qualimap, downsampled inputs are confusing
if not any(("qualimap_full" in dd.get_tools_on(d)) for d in samples):
thresholds_hidden = thresholds + thresholds_hidden
thresholds_hidden.sort()
thresholds = []
out['qualimap_config'] = {
'general_stats_coverage': [str(t) for t in thresholds],
'general_stats_coverage_hidden': [str(t) for t in thresholds_hidden]}
# Avoid confusing peddy outputs, sticking to ancestry and sex prediction
out["table_columns_visible"]["Peddy"] = {"family_id": False, "sex_het_ratio": False,
"error_sex_check": False}
# Setting the module order
module_order = []
module_order.extend([
"bcbio",
"samtools",
"goleft_indexcov",
"peddy"
])
out['bcftools'] = {'write_separate_table': True}
# if germline calling was performed:
if any("germline" in (get_active_vcinfo(s) or {}) or # tumor-only somatic with germline extraction
dd.get_phenotype(s) == "germline" or # or paired somatic with germline calling for normal
_has_bcftools_germline_stats(s) # CWL organized statistics
for s in samples):
# Split somatic and germline variant stats into separate multiqc submodules,
# with somatic going into General Stats, and germline going into a separate table:
module_order.extend([{
'bcftools': {
'name': 'Bcftools (somatic)',
'info': 'Bcftools stats for somatic variant calls only.',
'path_filters': ['*_bcftools_stats.txt'],
'write_general_stats': True,
}},
{'bcftools': {
'name': 'Bcftools (germline)',
'info': 'Bcftools stats for germline variant calls only.',
'path_filters': ['*_bcftools_stats_germline.txt'],
'write_general_stats': False
}},
])
else:
module_order.append("bcftools")
module_order.extend([
"salmon",
"picard",
"qualimap",
"snpeff",
"fastqc",
"preseq",
])
out["module_order"] = module_order
preseq_samples = [s for s in samples if tz.get_in(["config", "algorithm", "preseq"], s)]
if preseq_samples:
out["preseq"] = _make_preseq_multiqc_config(preseq_samples)
with open(out_file, "w") as out_handle:
yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
return out_file
|
def function[_create_config_file, parameter[out_dir, samples]]:
constant[Provide configuration file hiding duplicate columns.
Future entry point for providing top level configuration of output reports.
]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], constant[multiqc_config.yaml]]]
variable[out] assign[=] dictionary[[<ast.Constant object at 0x7da1b18ff160>], [<ast.Call object at 0x7da1b18ff0d0>]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b18fce80>]] begin[:]
call[call[name[out]][constant[table_columns_visible]]][constant[bcbio]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18fce20>], [<ast.Constant object at 0x7da1b18fd990>]]
call[call[name[out]][constant[table_columns_visible]]][constant[FastQC]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18fe650>], [<ast.Constant object at 0x7da1b18ff400>]]
variable[avg_depths] assign[=] <ast.ListComp object at 0x7da1b18fcd00>
variable[avg_depths] assign[=] <ast.ListComp object at 0x7da1b18fc400>
variable[thresholds] assign[=] <ast.ListComp object at 0x7da1b18fdc90>
if compare[call[name[len], parameter[name[thresholds]]] less[<] call[name[len], parameter[name[coverage].DEPTH_THRESHOLDS]]] begin[:]
call[name[thresholds].append, parameter[call[name[coverage].DEPTH_THRESHOLDS][call[name[len], parameter[name[thresholds]]]]]]
variable[thresholds_hidden] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b18fdcf0>, <ast.Name object at 0x7da1b18fcd60>]]] in starred[call[name[enumerate], parameter[name[thresholds]]]] begin[:]
if compare[name[t] greater[>] constant[20]] begin[:]
if <ast.BoolOp object at 0x7da1b18fe770> begin[:]
pass
if <ast.UnaryOp object at 0x7da1b18a8730> begin[:]
variable[thresholds_hidden] assign[=] binary_operation[name[thresholds] + name[thresholds_hidden]]
call[name[thresholds_hidden].sort, parameter[]]
variable[thresholds] assign[=] list[[]]
call[name[out]][constant[qualimap_config]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18a8f40>, <ast.Constant object at 0x7da1b18a8c70>], [<ast.ListComp object at 0x7da1b18a87f0>, <ast.ListComp object at 0x7da1b18ab820>]]
call[call[name[out]][constant[table_columns_visible]]][constant[Peddy]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18ab730>, <ast.Constant object at 0x7da1b18a9000>, <ast.Constant object at 0x7da1b18aa500>], [<ast.Constant object at 0x7da1b18a8490>, <ast.Constant object at 0x7da1b18ab8b0>, <ast.Constant object at 0x7da1b18ab2e0>]]
variable[module_order] assign[=] list[[]]
call[name[module_order].extend, parameter[list[[<ast.Constant object at 0x7da1b18a9ba0>, <ast.Constant object at 0x7da1b18a9390>, <ast.Constant object at 0x7da1b18a88e0>, <ast.Constant object at 0x7da1b18aabc0>]]]]
call[name[out]][constant[bcftools]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18abc40>], [<ast.Constant object at 0x7da1b18a8280>]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b18a93c0>]] begin[:]
call[name[module_order].extend, parameter[list[[<ast.Dict object at 0x7da1b18ab760>, <ast.Dict object at 0x7da1b18faec0>]]]]
call[name[module_order].extend, parameter[list[[<ast.Constant object at 0x7da1b18fb7f0>, <ast.Constant object at 0x7da1b18fa0b0>, <ast.Constant object at 0x7da1b18f82b0>, <ast.Constant object at 0x7da1b18fadd0>, <ast.Constant object at 0x7da1b18fb340>, <ast.Constant object at 0x7da1b18fbca0>]]]]
call[name[out]][constant[module_order]] assign[=] name[module_order]
variable[preseq_samples] assign[=] <ast.ListComp object at 0x7da1b18fb460>
if name[preseq_samples] begin[:]
call[name[out]][constant[preseq]] assign[=] call[name[_make_preseq_multiqc_config], parameter[name[preseq_samples]]]
with call[name[open], parameter[name[out_file], constant[w]]] begin[:]
call[name[yaml].safe_dump, parameter[name[out], name[out_handle]]]
return[name[out_file]]
|
keyword[def] identifier[_create_config_file] ( identifier[out_dir] , identifier[samples] ):
literal[string]
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] )
identifier[out] ={ literal[string] : identifier[dict] ()}
keyword[if] identifier[any] (( literal[string] keyword[in] identifier[dd] . identifier[get_tools_on] ( identifier[d] ) keyword[or] literal[string] keyword[in] identifier[dd] . identifier[get_tools_on] ( identifier[d] )) keyword[for] identifier[d] keyword[in] identifier[samples] ):
identifier[out] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[False] }
identifier[out] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[False] }
identifier[avg_depths] =[ identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] , literal[string] ], identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[samples] ]
identifier[avg_depths] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[avg_depths] keyword[if] identifier[x] ]
identifier[thresholds] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[coverage] . identifier[DEPTH_THRESHOLDS] keyword[if] keyword[not] identifier[avg_depths] keyword[or] identifier[t] <= identifier[max] ( identifier[avg_depths] )]
keyword[if] identifier[len] ( identifier[thresholds] )< identifier[len] ( identifier[coverage] . identifier[DEPTH_THRESHOLDS] ):
identifier[thresholds] . identifier[append] ( identifier[coverage] . identifier[DEPTH_THRESHOLDS] [ identifier[len] ( identifier[thresholds] )])
identifier[thresholds_hidden] =[]
keyword[for] identifier[i] , identifier[t] keyword[in] identifier[enumerate] ( identifier[thresholds] ):
keyword[if] identifier[t] > literal[int] :
keyword[if] identifier[any] ( identifier[thresholds] [ identifier[i] - literal[int] ]<= identifier[c] < identifier[thresholds] [ identifier[i] ] keyword[for] identifier[c] keyword[in] identifier[avg_depths] keyword[if] identifier[c] keyword[and] identifier[i] - literal[int] >= literal[int] ) keyword[or] identifier[any] ( identifier[thresholds] [ identifier[i] ]<= identifier[c] < identifier[thresholds] [ identifier[i] + literal[int] ] keyword[for] identifier[c] keyword[in] identifier[avg_depths] keyword[if] identifier[c] keyword[and] identifier[i] + literal[int] < identifier[len] ( identifier[thresholds] )):
keyword[pass]
keyword[else] :
identifier[thresholds_hidden] . identifier[append] ( identifier[t] )
keyword[if] keyword[not] identifier[any] (( literal[string] keyword[in] identifier[dd] . identifier[get_tools_on] ( identifier[d] )) keyword[for] identifier[d] keyword[in] identifier[samples] ):
identifier[thresholds_hidden] = identifier[thresholds] + identifier[thresholds_hidden]
identifier[thresholds_hidden] . identifier[sort] ()
identifier[thresholds] =[]
identifier[out] [ literal[string] ]={
literal[string] :[ identifier[str] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[thresholds] ],
literal[string] :[ identifier[str] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[thresholds_hidden] ]}
identifier[out] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[False] , literal[string] : keyword[False] ,
literal[string] : keyword[False] }
identifier[module_order] =[]
identifier[module_order] . identifier[extend] ([
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
])
identifier[out] [ literal[string] ]={ literal[string] : keyword[True] }
keyword[if] identifier[any] ( literal[string] keyword[in] ( identifier[get_active_vcinfo] ( identifier[s] ) keyword[or] {}) keyword[or]
identifier[dd] . identifier[get_phenotype] ( identifier[s] )== literal[string] keyword[or]
identifier[_has_bcftools_germline_stats] ( identifier[s] )
keyword[for] identifier[s] keyword[in] identifier[samples] ):
identifier[module_order] . identifier[extend] ([{
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :[ literal[string] ],
literal[string] : keyword[True] ,
}},
{ literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :[ literal[string] ],
literal[string] : keyword[False]
}},
])
keyword[else] :
identifier[module_order] . identifier[append] ( literal[string] )
identifier[module_order] . identifier[extend] ([
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
])
identifier[out] [ literal[string] ]= identifier[module_order]
identifier[preseq_samples] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[samples] keyword[if] identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] , literal[string] ], identifier[s] )]
keyword[if] identifier[preseq_samples] :
identifier[out] [ literal[string] ]= identifier[_make_preseq_multiqc_config] ( identifier[preseq_samples] )
keyword[with] identifier[open] ( identifier[out_file] , literal[string] ) keyword[as] identifier[out_handle] :
identifier[yaml] . identifier[safe_dump] ( identifier[out] , identifier[out_handle] , identifier[default_flow_style] = keyword[False] , identifier[allow_unicode] = keyword[False] )
keyword[return] identifier[out_file]
|
def _create_config_file(out_dir, samples):
"""Provide configuration file hiding duplicate columns.
Future entry point for providing top level configuration of output reports.
"""
out_file = os.path.join(out_dir, 'multiqc_config.yaml')
out = {'table_columns_visible': dict()}
# Avoid duplicated bcbio columns with qualimap
if any(('qualimap' in dd.get_tools_on(d) or 'qualimap_full' in dd.get_tools_on(d) for d in samples)):
# Hiding metrics duplicated by Qualimap
out['table_columns_visible']['bcbio'] = {'Average_insert_size': False}
out['table_columns_visible']['FastQC'] = {'percent_gc': False}
# Setting up thresholds for Qualimap depth cutoff calculations, based on sample avg depths
avg_depths = [tz.get_in(['summary', 'metrics', 'Avg_coverage'], s) for s in samples]
avg_depths = [x for x in avg_depths if x]
# Picking all thresholds up to the highest sample average depth
thresholds = [t for t in coverage.DEPTH_THRESHOLDS if not avg_depths or t <= max(avg_depths)]
# ...plus one more
if len(thresholds) < len(coverage.DEPTH_THRESHOLDS):
thresholds.append(coverage.DEPTH_THRESHOLDS[len(thresholds)]) # depends on [control=['if'], data=[]]
# Showing only thresholds surrounding any of average depths
thresholds_hidden = []
for (i, t) in enumerate(thresholds):
if t > 20: # Not hiding anything below 20x
if any((thresholds[i - 1] <= c < thresholds[i] for c in avg_depths if c and i - 1 >= 0)) or any((thresholds[i] <= c < thresholds[i + 1] for c in avg_depths if c and i + 1 < len(thresholds))):
pass # depends on [control=['if'], data=[]]
else:
thresholds_hidden.append(t) # depends on [control=['if'], data=['t']] # depends on [control=['for'], data=[]]
# Hide coverage unless running full qualimap, downsampled inputs are confusing
if not any(('qualimap_full' in dd.get_tools_on(d) for d in samples)):
thresholds_hidden = thresholds + thresholds_hidden
thresholds_hidden.sort()
thresholds = [] # depends on [control=['if'], data=[]]
out['qualimap_config'] = {'general_stats_coverage': [str(t) for t in thresholds], 'general_stats_coverage_hidden': [str(t) for t in thresholds_hidden]} # depends on [control=['if'], data=[]]
# Avoid confusing peddy outputs, sticking to ancestry and sex prediction
out['table_columns_visible']['Peddy'] = {'family_id': False, 'sex_het_ratio': False, 'error_sex_check': False}
# Setting the module order
module_order = []
module_order.extend(['bcbio', 'samtools', 'goleft_indexcov', 'peddy'])
out['bcftools'] = {'write_separate_table': True}
# if germline calling was performed:
if any(('germline' in (get_active_vcinfo(s) or {}) or dd.get_phenotype(s) == 'germline' or _has_bcftools_germline_stats(s) for s in samples)): # tumor-only somatic with germline extraction
# or paired somatic with germline calling for normal
# CWL organized statistics
# Split somatic and germline variant stats into separate multiqc submodules,
# with somatic going into General Stats, and germline going into a separate table:
module_order.extend([{'bcftools': {'name': 'Bcftools (somatic)', 'info': 'Bcftools stats for somatic variant calls only.', 'path_filters': ['*_bcftools_stats.txt'], 'write_general_stats': True}}, {'bcftools': {'name': 'Bcftools (germline)', 'info': 'Bcftools stats for germline variant calls only.', 'path_filters': ['*_bcftools_stats_germline.txt'], 'write_general_stats': False}}]) # depends on [control=['if'], data=[]]
else:
module_order.append('bcftools')
module_order.extend(['salmon', 'picard', 'qualimap', 'snpeff', 'fastqc', 'preseq'])
out['module_order'] = module_order
preseq_samples = [s for s in samples if tz.get_in(['config', 'algorithm', 'preseq'], s)]
if preseq_samples:
out['preseq'] = _make_preseq_multiqc_config(preseq_samples) # depends on [control=['if'], data=[]]
with open(out_file, 'w') as out_handle:
yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False) # depends on [control=['with'], data=['out_handle']]
return out_file
|
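The threshold-window selection is the densest part of the function above; isolated, it reads as follows (the DEPTH_THRESHOLDS values here are invented for the example, not bcbio's actual constants):

DEPTH_THRESHOLDS = [1, 5, 10, 20, 50, 100, 250, 500]

def visible_thresholds(avg_depths):
    # Keep every threshold up to the highest sample average depth...
    ts = [t for t in DEPTH_THRESHOLDS if not avg_depths or t <= max(avg_depths)]
    # ...plus one more, if any remain.
    if len(ts) < len(DEPTH_THRESHOLDS):
        ts.append(DEPTH_THRESHOLDS[len(ts)])
    return ts

assert visible_thresholds([30]) == [1, 5, 10, 20, 50]
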
def p_expression_eql(self, p):
'expression : expression EQL expression'
p[0] = Eql(p[1], p[3], lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
def function[p_expression_eql, parameter[self, p]]:
constant[expression : expression EQL expression]
call[name[p]][constant[0]] assign[=] call[name[Eql], parameter[call[name[p]][constant[1]], call[name[p]][constant[3]]]]
call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
|
keyword[def] identifier[p_expression_eql] ( identifier[self] , identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[Eql] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
|
def p_expression_eql(self, p):
"""expression : expression EQL expression"""
p[0] = Eql(p[1], p[3], lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
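In PLY the docstring is the grammar rule, so sibling operators follow the same template; a hedged sketch assuming an NEQ token and a Neq AST node exist in this grammar:

def p_expression_neq(self, p):
    'expression : expression NEQ expression'
    # p[1] and p[3] are the operand subtrees; the result keeps
    # the line number of the left operand, as in the EQL rule.
    p[0] = Neq(p[1], p[3], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
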
def create(self, sip_domain_sid):
"""
Create a new TerminatingSipDomainInstance
:param unicode sip_domain_sid: The SID of the SIP Domain to associate with the trunk
:returns: Newly created TerminatingSipDomainInstance
:rtype: twilio.rest.trunking.v1.trunk.terminating_sip_domain.TerminatingSipDomainInstance
"""
data = values.of({'SipDomainSid': sip_domain_sid, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return TerminatingSipDomainInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )
|
def function[create, parameter[self, sip_domain_sid]]:
constant[
Create a new TerminatingSipDomainInstance
:param unicode sip_domain_sid: The SID of the SIP Domain to associate with the trunk
:returns: Newly created TerminatingSipDomainInstance
:rtype: twilio.rest.trunking.v1.trunk.terminating_sip_domain.TerminatingSipDomainInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da2054a6b90>], [<ast.Name object at 0x7da2054a5f90>]]]]
variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]]
return[call[name[TerminatingSipDomainInstance], parameter[name[self]._version, name[payload]]]]
|
keyword[def] identifier[create] ( identifier[self] , identifier[sip_domain_sid] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({ literal[string] : identifier[sip_domain_sid] ,})
identifier[payload] = identifier[self] . identifier[_version] . identifier[create] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[TerminatingSipDomainInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[trunk_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
|
def create(self, sip_domain_sid):
"""
Create a new TerminatingSipDomainInstance
:param unicode sip_domain_sid: The SID of the SIP Domain to associate with the trunk
:returns: Newly created TerminatingSipDomainInstance
:rtype: twilio.rest.trunking.v1.trunk.terminating_sip_domain.TerminatingSipDomainInstance
"""
data = values.of({'SipDomainSid': sip_domain_sid})
payload = self._version.create('POST', self._uri, data=data)
return TerminatingSipDomainInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'])
|
def get_tmpdir(requested_tmpdir=None, prefix="", create=True):
    '''get a temporary directory for an operation. The base is the requested
    directory when given, else SREGISTRY_TMPDIR; a uniquely prefixed subfolder is derived beneath it.
Parameters
==========
    requested_tmpdir: an optional requested temporary directory; when provided,
       it takes priority over SREGISTRY_TMPDIR.
prefix: Given a need for a sandbox (or similar), we will need to
create a subfolder *within* the SREGISTRY_TMPDIR.
create: boolean to determine if we should create folder (True)
'''
from sregistry.defaults import SREGISTRY_TMPDIR
# First priority for the base goes to the user requested.
tmpdir = requested_tmpdir or SREGISTRY_TMPDIR
prefix = prefix or "sregistry-tmp"
prefix = "%s.%s" %(prefix, next(tempfile._get_candidate_names()))
tmpdir = os.path.join(tmpdir, prefix)
if not os.path.exists(tmpdir) and create is True:
os.mkdir(tmpdir)
return tmpdir
|
def function[get_tmpdir, parameter[requested_tmpdir, prefix, create]]:
    constant[get a temporary directory for an operation. The base is the requested
    directory when given, else SREGISTRY_TMPDIR; a uniquely prefixed subfolder is derived beneath it.
Parameters
==========
    requested_tmpdir: an optional requested temporary directory; when provided,
       it takes priority over SREGISTRY_TMPDIR.
prefix: Given a need for a sandbox (or similar), we will need to
create a subfolder *within* the SREGISTRY_TMPDIR.
create: boolean to determine if we should create folder (True)
]
from relative_module[sregistry.defaults] import module[SREGISTRY_TMPDIR]
variable[tmpdir] assign[=] <ast.BoolOp object at 0x7da1b05c6da0>
variable[prefix] assign[=] <ast.BoolOp object at 0x7da1b05c7f10>
variable[prefix] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05c7460>, <ast.Call object at 0x7da1b05c60e0>]]]
variable[tmpdir] assign[=] call[name[os].path.join, parameter[name[tmpdir], name[prefix]]]
if <ast.BoolOp object at 0x7da1b05c7b20> begin[:]
call[name[os].mkdir, parameter[name[tmpdir]]]
return[name[tmpdir]]
|
keyword[def] identifier[get_tmpdir] ( identifier[requested_tmpdir] = keyword[None] , identifier[prefix] = literal[string] , identifier[create] = keyword[True] ):
literal[string]
keyword[from] identifier[sregistry] . identifier[defaults] keyword[import] identifier[SREGISTRY_TMPDIR]
identifier[tmpdir] = identifier[requested_tmpdir] keyword[or] identifier[SREGISTRY_TMPDIR]
identifier[prefix] = identifier[prefix] keyword[or] literal[string]
identifier[prefix] = literal[string] %( identifier[prefix] , identifier[next] ( identifier[tempfile] . identifier[_get_candidate_names] ()))
identifier[tmpdir] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmpdir] , identifier[prefix] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[tmpdir] ) keyword[and] identifier[create] keyword[is] keyword[True] :
identifier[os] . identifier[mkdir] ( identifier[tmpdir] )
keyword[return] identifier[tmpdir]
|
def get_tmpdir(requested_tmpdir=None, prefix='', create=True):
"""get a temporary directory for an operation. If SREGISTRY_TMPDIR
is set, return that. Otherwise, return the output of tempfile.mkdtemp
Parameters
==========
    requested_tmpdir: an optional requested temporary directory; when provided,
       it takes priority over SREGISTRY_TMPDIR.
prefix: Given a need for a sandbox (or similar), we will need to
create a subfolder *within* the SREGISTRY_TMPDIR.
create: boolean to determine if we should create folder (True)
"""
from sregistry.defaults import SREGISTRY_TMPDIR
# First priority for the base goes to the user requested.
tmpdir = requested_tmpdir or SREGISTRY_TMPDIR
prefix = prefix or 'sregistry-tmp'
prefix = '%s.%s' % (prefix, next(tempfile._get_candidate_names()))
tmpdir = os.path.join(tmpdir, prefix)
if not os.path.exists(tmpdir) and create is True:
os.mkdir(tmpdir) # depends on [control=['if'], data=[]]
return tmpdir
|
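tempfile._get_candidate_names() is a private CPython helper with no stability guarantee; a public-API alternative with the same effect (sketch):

import os
import tempfile

def make_tmpdir(base=None, prefix='sregistry-tmp'):
    base = base or tempfile.gettempdir()
    # mkdtemp creates the directory atomically with a unique suffix.
    return tempfile.mkdtemp(prefix=prefix + '.', dir=base)

tmp = make_tmpdir()
assert os.path.isdir(tmp)
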
def jx_type(column):
"""
    return the jx_type for the given column
"""
if column.es_column.endswith(EXISTS_TYPE):
return EXISTS
return es_type_to_json_type[column.es_type]
|
def function[jx_type, parameter[column]]:
constant[
    return the jx_type for the given column
]
if call[name[column].es_column.endswith, parameter[name[EXISTS_TYPE]]] begin[:]
return[name[EXISTS]]
return[call[name[es_type_to_json_type]][name[column].es_type]]
|
keyword[def] identifier[jx_type] ( identifier[column] ):
literal[string]
keyword[if] identifier[column] . identifier[es_column] . identifier[endswith] ( identifier[EXISTS_TYPE] ):
keyword[return] identifier[EXISTS]
keyword[return] identifier[es_type_to_json_type] [ identifier[column] . identifier[es_type] ]
|
def jx_type(column):
"""
    return the jx_type for the given column
"""
if column.es_column.endswith(EXISTS_TYPE):
return EXISTS # depends on [control=['if'], data=[]]
return es_type_to_json_type[column.es_type]
|
def handle_client_stream(self, stream, is_unix=False):
""" Handles stream of data received from client. """
assert stream
data = []
stream.settimeout(2)
while True:
try:
if is_unix:
buf = stream.recv(1024)
else:
buf = stream.read(1024)
if not buf:
break
data.append(buf)
except (AttributeError, ValueError) as message:
logger.error(message)
return
except (ssl.SSLError) as exception:
logger.debug('Error: %s', exception[0])
break
except (socket.timeout) as exception:
logger.debug('Error: %s', exception)
break
data = b''.join(data)
if len(data) <= 0:
logger.debug("Empty client stream")
return
try:
response = self.handle_command(data)
except OSPDError as exception:
response = exception.as_xml()
logger.debug('Command error: %s', exception.message)
except Exception:
logger.exception('While handling client command:')
exception = OSPDError('Fatal error', 'error')
response = exception.as_xml()
if is_unix:
send_method = stream.send
else:
send_method = stream.write
self.write_to_stream(send_method, response)
|
def function[handle_client_stream, parameter[self, stream, is_unix]]:
constant[ Handles stream of data received from client. ]
assert[name[stream]]
variable[data] assign[=] list[[]]
call[name[stream].settimeout, parameter[constant[2]]]
while constant[True] begin[:]
<ast.Try object at 0x7da207f02170>
variable[data] assign[=] call[constant[b''].join, parameter[name[data]]]
if compare[call[name[len], parameter[name[data]]] less_or_equal[<=] constant[0]] begin[:]
call[name[logger].debug, parameter[constant[Empty client stream]]]
return[None]
<ast.Try object at 0x7da207f033a0>
if name[is_unix] begin[:]
variable[send_method] assign[=] name[stream].send
call[name[self].write_to_stream, parameter[name[send_method], name[response]]]
|
keyword[def] identifier[handle_client_stream] ( identifier[self] , identifier[stream] , identifier[is_unix] = keyword[False] ):
literal[string]
keyword[assert] identifier[stream]
identifier[data] =[]
identifier[stream] . identifier[settimeout] ( literal[int] )
keyword[while] keyword[True] :
keyword[try] :
keyword[if] identifier[is_unix] :
identifier[buf] = identifier[stream] . identifier[recv] ( literal[int] )
keyword[else] :
identifier[buf] = identifier[stream] . identifier[read] ( literal[int] )
keyword[if] keyword[not] identifier[buf] :
keyword[break]
identifier[data] . identifier[append] ( identifier[buf] )
keyword[except] ( identifier[AttributeError] , identifier[ValueError] ) keyword[as] identifier[message] :
identifier[logger] . identifier[error] ( identifier[message] )
keyword[return]
keyword[except] ( identifier[ssl] . identifier[SSLError] ) keyword[as] identifier[exception] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[exception] [ literal[int] ])
keyword[break]
keyword[except] ( identifier[socket] . identifier[timeout] ) keyword[as] identifier[exception] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[exception] )
keyword[break]
identifier[data] = literal[string] . identifier[join] ( identifier[data] )
keyword[if] identifier[len] ( identifier[data] )<= literal[int] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return]
keyword[try] :
identifier[response] = identifier[self] . identifier[handle_command] ( identifier[data] )
keyword[except] identifier[OSPDError] keyword[as] identifier[exception] :
identifier[response] = identifier[exception] . identifier[as_xml] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[exception] . identifier[message] )
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] ( literal[string] )
identifier[exception] = identifier[OSPDError] ( literal[string] , literal[string] )
identifier[response] = identifier[exception] . identifier[as_xml] ()
keyword[if] identifier[is_unix] :
identifier[send_method] = identifier[stream] . identifier[send]
keyword[else] :
identifier[send_method] = identifier[stream] . identifier[write]
identifier[self] . identifier[write_to_stream] ( identifier[send_method] , identifier[response] )
|
def handle_client_stream(self, stream, is_unix=False):
""" Handles stream of data received from client. """
assert stream
data = []
stream.settimeout(2)
while True:
try:
if is_unix:
buf = stream.recv(1024) # depends on [control=['if'], data=[]]
else:
buf = stream.read(1024)
if not buf:
break # depends on [control=['if'], data=[]]
data.append(buf) # depends on [control=['try'], data=[]]
except (AttributeError, ValueError) as message:
logger.error(message)
return # depends on [control=['except'], data=['message']]
except ssl.SSLError as exception:
            logger.debug('Error: %s', exception.args[0])
break # depends on [control=['except'], data=['exception']]
except socket.timeout as exception:
logger.debug('Error: %s', exception)
break # depends on [control=['except'], data=['exception']] # depends on [control=['while'], data=[]]
data = b''.join(data)
if len(data) <= 0:
logger.debug('Empty client stream')
return # depends on [control=['if'], data=[]]
try:
response = self.handle_command(data) # depends on [control=['try'], data=[]]
except OSPDError as exception:
response = exception.as_xml()
logger.debug('Command error: %s', exception.message) # depends on [control=['except'], data=['exception']]
except Exception:
logger.exception('While handling client command:')
exception = OSPDError('Fatal error', 'error')
response = exception.as_xml() # depends on [control=['except'], data=[]]
if is_unix:
send_method = stream.send # depends on [control=['if'], data=[]]
else:
send_method = stream.write
self.write_to_stream(send_method, response)
|
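The reusable core of handle_client_stream is its read loop: drain the stream until EOF or timeout, then join the chunks once at the end. A minimal sketch against a plain socket, with the is_unix/SSL split and the command dispatch omitted:

import socket

def read_request(conn, bufsize=1024, timeout=2):
    # Mirrors the loop above: a timeout ends the read instead of failing it,
    # and the byte chunks are joined exactly once after the loop.
    chunks = []
    conn.settimeout(timeout)
    while True:
        try:
            buf = conn.recv(bufsize)
        except socket.timeout:
            break
        if not buf:  # peer closed the connection
            break
        chunks.append(buf)
    return b''.join(chunks)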
def _find_scalac_plugins(self, scalac_plugins, classpath):
"""Returns a map from plugin name to list of plugin classpath entries.
The first entry in each list is the classpath entry containing the plugin metadata.
The rest are the internal transitive deps of the plugin.
This allows us to have in-repo plugins with dependencies (unlike javac, scalac doesn't load
plugins or their deps from the regular classpath, so we have to provide these entries
separately, in the -Xplugin: flag).
Note that we don't currently support external plugins with dependencies, as we can't know which
external classpath elements are required, and we'd have to put the entire external classpath
on each -Xplugin: flag, which seems excessive.
Instead, external plugins should be published as "fat jars" (which appears to be the norm,
since SBT doesn't support plugins with dependencies anyway).
"""
# Allow multiple flags and also comma-separated values in a single flag.
plugin_names = {p for val in scalac_plugins for p in val.split(',')}
if not plugin_names:
return {}
active_plugins = {}
buildroot = get_buildroot()
cp_product = self.context.products.get_data('runtime_classpath')
for classpath_element in classpath:
name = self._maybe_get_plugin_name(classpath_element)
if name in plugin_names:
plugin_target_closure = self._plugin_targets('scalac').get(name, [])
# It's important to use relative paths, as the compiler flags get embedded in the zinc
# analysis file, and we port those between systems via the artifact cache.
rel_classpath_elements = [
os.path.relpath(cpe, buildroot) for cpe in
ClasspathUtil.internal_classpath(plugin_target_closure, cp_product, self._confs)]
# If the plugin is external then rel_classpath_elements will be empty, so we take
# just the external jar itself.
rel_classpath_elements = rel_classpath_elements or [classpath_element]
# Some classpath elements may be repeated, so we allow for that here.
if active_plugins.get(name, rel_classpath_elements) != rel_classpath_elements:
raise TaskError('Plugin {} defined in {} and in {}'.format(name, active_plugins[name],
classpath_element))
active_plugins[name] = rel_classpath_elements
if len(active_plugins) == len(plugin_names):
# We've found all the plugins, so return now to spare us from processing
# of the rest of the classpath for no reason.
return active_plugins
# If we get here we must have unresolved plugins.
unresolved_plugins = plugin_names - set(active_plugins.keys())
raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins)))
|
def function[_find_scalac_plugins, parameter[self, scalac_plugins, classpath]]:
constant[Returns a map from plugin name to list of plugin classpath entries.
The first entry in each list is the classpath entry containing the plugin metadata.
The rest are the internal transitive deps of the plugin.
This allows us to have in-repo plugins with dependencies (unlike javac, scalac doesn't load
plugins or their deps from the regular classpath, so we have to provide these entries
separately, in the -Xplugin: flag).
Note that we don't currently support external plugins with dependencies, as we can't know which
external classpath elements are required, and we'd have to put the entire external classpath
on each -Xplugin: flag, which seems excessive.
Instead, external plugins should be published as "fat jars" (which appears to be the norm,
since SBT doesn't support plugins with dependencies anyway).
]
variable[plugin_names] assign[=] <ast.SetComp object at 0x7da18fe92b00>
if <ast.UnaryOp object at 0x7da18fe93430> begin[:]
return[dictionary[[], []]]
variable[active_plugins] assign[=] dictionary[[], []]
variable[buildroot] assign[=] call[name[get_buildroot], parameter[]]
variable[cp_product] assign[=] call[name[self].context.products.get_data, parameter[constant[runtime_classpath]]]
for taget[name[classpath_element]] in starred[name[classpath]] begin[:]
variable[name] assign[=] call[name[self]._maybe_get_plugin_name, parameter[name[classpath_element]]]
if compare[name[name] in name[plugin_names]] begin[:]
variable[plugin_target_closure] assign[=] call[call[name[self]._plugin_targets, parameter[constant[scalac]]].get, parameter[name[name], list[[]]]]
variable[rel_classpath_elements] assign[=] <ast.ListComp object at 0x7da1b2293c10>
variable[rel_classpath_elements] assign[=] <ast.BoolOp object at 0x7da1b2290790>
if compare[call[name[active_plugins].get, parameter[name[name], name[rel_classpath_elements]]] not_equal[!=] name[rel_classpath_elements]] begin[:]
<ast.Raise object at 0x7da1b2293d60>
call[name[active_plugins]][name[name]] assign[=] name[rel_classpath_elements]
if compare[call[name[len], parameter[name[active_plugins]]] equal[==] call[name[len], parameter[name[plugin_names]]]] begin[:]
return[name[active_plugins]]
variable[unresolved_plugins] assign[=] binary_operation[name[plugin_names] - call[name[set], parameter[call[name[active_plugins].keys, parameter[]]]]]
<ast.Raise object at 0x7da1b1d346a0>
|
keyword[def] identifier[_find_scalac_plugins] ( identifier[self] , identifier[scalac_plugins] , identifier[classpath] ):
literal[string]
identifier[plugin_names] ={ identifier[p] keyword[for] identifier[val] keyword[in] identifier[scalac_plugins] keyword[for] identifier[p] keyword[in] identifier[val] . identifier[split] ( literal[string] )}
keyword[if] keyword[not] identifier[plugin_names] :
keyword[return] {}
identifier[active_plugins] ={}
identifier[buildroot] = identifier[get_buildroot] ()
identifier[cp_product] = identifier[self] . identifier[context] . identifier[products] . identifier[get_data] ( literal[string] )
keyword[for] identifier[classpath_element] keyword[in] identifier[classpath] :
identifier[name] = identifier[self] . identifier[_maybe_get_plugin_name] ( identifier[classpath_element] )
keyword[if] identifier[name] keyword[in] identifier[plugin_names] :
identifier[plugin_target_closure] = identifier[self] . identifier[_plugin_targets] ( literal[string] ). identifier[get] ( identifier[name] ,[])
identifier[rel_classpath_elements] =[
identifier[os] . identifier[path] . identifier[relpath] ( identifier[cpe] , identifier[buildroot] ) keyword[for] identifier[cpe] keyword[in]
identifier[ClasspathUtil] . identifier[internal_classpath] ( identifier[plugin_target_closure] , identifier[cp_product] , identifier[self] . identifier[_confs] )]
identifier[rel_classpath_elements] = identifier[rel_classpath_elements] keyword[or] [ identifier[classpath_element] ]
keyword[if] identifier[active_plugins] . identifier[get] ( identifier[name] , identifier[rel_classpath_elements] )!= identifier[rel_classpath_elements] :
keyword[raise] identifier[TaskError] ( literal[string] . identifier[format] ( identifier[name] , identifier[active_plugins] [ identifier[name] ],
identifier[classpath_element] ))
identifier[active_plugins] [ identifier[name] ]= identifier[rel_classpath_elements]
keyword[if] identifier[len] ( identifier[active_plugins] )== identifier[len] ( identifier[plugin_names] ):
keyword[return] identifier[active_plugins]
identifier[unresolved_plugins] = identifier[plugin_names] - identifier[set] ( identifier[active_plugins] . identifier[keys] ())
keyword[raise] identifier[TaskError] ( literal[string] . identifier[format] ( identifier[list] ( identifier[unresolved_plugins] )))
|
def _find_scalac_plugins(self, scalac_plugins, classpath):
"""Returns a map from plugin name to list of plugin classpath entries.
The first entry in each list is the classpath entry containing the plugin metadata.
The rest are the internal transitive deps of the plugin.
This allows us to have in-repo plugins with dependencies (unlike javac, scalac doesn't load
plugins or their deps from the regular classpath, so we have to provide these entries
separately, in the -Xplugin: flag).
Note that we don't currently support external plugins with dependencies, as we can't know which
external classpath elements are required, and we'd have to put the entire external classpath
on each -Xplugin: flag, which seems excessive.
Instead, external plugins should be published as "fat jars" (which appears to be the norm,
since SBT doesn't support plugins with dependencies anyway).
"""
# Allow multiple flags and also comma-separated values in a single flag.
plugin_names = {p for val in scalac_plugins for p in val.split(',')}
if not plugin_names:
return {} # depends on [control=['if'], data=[]]
active_plugins = {}
buildroot = get_buildroot()
cp_product = self.context.products.get_data('runtime_classpath')
for classpath_element in classpath:
name = self._maybe_get_plugin_name(classpath_element)
if name in plugin_names:
plugin_target_closure = self._plugin_targets('scalac').get(name, [])
# It's important to use relative paths, as the compiler flags get embedded in the zinc
# analysis file, and we port those between systems via the artifact cache.
rel_classpath_elements = [os.path.relpath(cpe, buildroot) for cpe in ClasspathUtil.internal_classpath(plugin_target_closure, cp_product, self._confs)]
# If the plugin is external then rel_classpath_elements will be empty, so we take
# just the external jar itself.
rel_classpath_elements = rel_classpath_elements or [classpath_element]
# Some classpath elements may be repeated, so we allow for that here.
if active_plugins.get(name, rel_classpath_elements) != rel_classpath_elements:
raise TaskError('Plugin {} defined in {} and in {}'.format(name, active_plugins[name], classpath_element)) # depends on [control=['if'], data=[]]
active_plugins[name] = rel_classpath_elements
if len(active_plugins) == len(plugin_names):
# We've found all the plugins, so return now to spare us from processing
# of the rest of the classpath for no reason.
return active_plugins # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['name', 'plugin_names']] # depends on [control=['for'], data=['classpath_element']]
# If we get here we must have unresolved plugins.
unresolved_plugins = plugin_names - set(active_plugins.keys())
raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins)))
|
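The first line of the body is a small, reusable trick: it accepts both repeated flags and comma-separated values inside one flag. Isolated below; the `if p` guard is an addition here, so a trailing comma does not yield an empty plugin name.

def split_flag_values(values):
    # {'a', 'b', 'c'} from either ['a,b', 'c'] or ['a', 'b', 'c'].
    return {p for val in values for p in val.split(',') if p}

assert split_flag_values(['scalajs,kind-projector', 'wartremover']) == {'scalajs', 'kind-projector', 'wartremover'}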
def replace(self, **updates):
"""Return a new profile with the given updates.
Unspecified fields will be the same as this instance. See `__new__`
for details on the arguments.
"""
state = self.dump()
state.update(updates)
return self.__class__(**state)
|
def function[replace, parameter[self]]:
constant[Return a new profile with the given updates.
Unspecified fields will be the same as this instance. See `__new__`
for details on the arguments.
]
variable[state] assign[=] call[name[self].dump, parameter[]]
call[name[state].update, parameter[name[updates]]]
return[call[name[self].__class__, parameter[]]]
|
keyword[def] identifier[replace] ( identifier[self] ,** identifier[updates] ):
literal[string]
identifier[state] = identifier[self] . identifier[dump] ()
identifier[state] . identifier[update] ( identifier[updates] )
keyword[return] identifier[self] . identifier[__class__] (** identifier[state] )
|
def replace(self, **updates):
"""Return a new profile with the given updates.
Unspecified fields will be the same as this instance. See `__new__`
for details on the arguments.
"""
state = self.dump()
state.update(updates)
return self.__class__(**state)
|
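replace() is the copy-with-overrides idiom: dump the current state, overlay the updates, and rebuild through the constructor, leaving the original untouched. A hypothetical Profile (dump() and the constructor signature are assumptions) shows the round trip:

class Profile(object):
    def __init__(self, name, retries=3):
        self.name = name
        self.retries = retries

    def dump(self):
        return {'name': self.name, 'retries': self.retries}

    def replace(self, **updates):
        state = self.dump()
        state.update(updates)
        return self.__class__(**state)

p = Profile('default')
q = p.replace(retries=5)
assert (q.name, q.retries) == ('default', 5)
assert p.retries == 3  # the original instance is unchanged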
def getAccountRole(store, accountNames):
"""
    Retrieve the first Role in the given store which corresponds to an account
name in C{accountNames}.
Note: the implementation currently ignores all of the values in
C{accountNames} except for the first.
@param accountNames: A C{list} of two-tuples of account local parts and
domains.
@raise ValueError: If C{accountNames} is empty.
@rtype: L{Role}
"""
for (localpart, domain) in accountNames:
return getPrimaryRole(store, u'%s@%s' % (localpart, domain),
createIfNotFound=True)
raise ValueError("Cannot get named role for unnamed account.")
|
def function[getAccountRole, parameter[store, accountNames]]:
constant[
    Retrieve the first Role in the given store which corresponds to an account
name in C{accountNames}.
Note: the implementation currently ignores all of the values in
C{accountNames} except for the first.
@param accountNames: A C{list} of two-tuples of account local parts and
domains.
@raise ValueError: If C{accountNames} is empty.
@rtype: L{Role}
]
for taget[tuple[[<ast.Name object at 0x7da1b0a4c520>, <ast.Name object at 0x7da1b0a4fb80>]]] in starred[name[accountNames]] begin[:]
return[call[name[getPrimaryRole], parameter[name[store], binary_operation[constant[%s@%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a6dea0>, <ast.Name object at 0x7da1b0a6c400>]]]]]]
<ast.Raise object at 0x7da1b0a6d960>
|
keyword[def] identifier[getAccountRole] ( identifier[store] , identifier[accountNames] ):
literal[string]
keyword[for] ( identifier[localpart] , identifier[domain] ) keyword[in] identifier[accountNames] :
keyword[return] identifier[getPrimaryRole] ( identifier[store] , literal[string] %( identifier[localpart] , identifier[domain] ),
identifier[createIfNotFound] = keyword[True] )
keyword[raise] identifier[ValueError] ( literal[string] )
|
def getAccountRole(store, accountNames):
"""
    Retrieve the first Role in the given store which corresponds to an account
name in C{accountNames}.
Note: the implementation currently ignores all of the values in
C{accountNames} except for the first.
@param accountNames: A C{list} of two-tuples of account local parts and
domains.
@raise ValueError: If C{accountNames} is empty.
@rtype: L{Role}
"""
for (localpart, domain) in accountNames:
return getPrimaryRole(store, u'%s@%s' % (localpart, domain), createIfNotFound=True) # depends on [control=['for'], data=[]]
raise ValueError('Cannot get named role for unnamed account.')
|
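The for/return shape above is a compact "first element or raise" idiom: the loop body runs at most once, and the raise is reached only when the iterable is empty. The idiom in isolation:

def first_or_raise(items, message):
    # Returns the first element; raises ValueError for an empty iterable.
    for item in items:
        return item
    raise ValueError(message)

assert first_or_raise([('alice', 'example.com')], 'no accounts') == ('alice', 'example.com')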
def to_python(self, value):
""" Return a str representation of the hexadecimal """
if isinstance(value, six.string_types):
return value
if value is None:
return value
return _unsigned_integer_to_hex_string(value)
|
def function[to_python, parameter[self, value]]:
constant[ Return a str representation of the hexadecimal ]
if call[name[isinstance], parameter[name[value], name[six].string_types]] begin[:]
return[name[value]]
if compare[name[value] is constant[None]] begin[:]
return[name[value]]
return[call[name[_unsigned_integer_to_hex_string], parameter[name[value]]]]
|
keyword[def] identifier[to_python] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ):
keyword[return] identifier[value]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[value]
keyword[return] identifier[_unsigned_integer_to_hex_string] ( identifier[value] )
|
def to_python(self, value):
""" Return a str representation of the hexadecimal """
if isinstance(value, six.string_types):
return value # depends on [control=['if'], data=[]]
if value is None:
return value # depends on [control=['if'], data=['value']]
return _unsigned_integer_to_hex_string(value)
|
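to_python() passes strings and None through unchanged and converts integers to hex via a module-level helper. A runnable sketch; the helper's exact output format (zero-padded 16-digit lowercase hex) is an assumption here:

import six

def _unsigned_integer_to_hex_string(value):
    # Assumed formatting; the real helper lives elsewhere in the module.
    return '%016x' % value

def to_python(value):
    if isinstance(value, six.string_types):
        return value
    if value is None:
        return value
    return _unsigned_integer_to_hex_string(value)

assert to_python('ff') == 'ff'
assert to_python(None) is None
assert to_python(255) == '00000000000000ff'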
def get_idp_choices():
"""
Get a list of identity providers choices for enterprise customer.
Return:
A list of choices of all identity providers, None if it can not get any available identity provider.
"""
try:
from third_party_auth.provider import Registry # pylint: disable=redefined-outer-name
except ImportError as exception:
LOGGER.warning("Could not import Registry from third_party_auth.provider")
LOGGER.warning(exception)
Registry = None # pylint: disable=redefined-outer-name
first = [("", "-" * 7)]
if Registry:
return first + [(idp.provider_id, idp.name) for idp in Registry.enabled()]
return None
|
def function[get_idp_choices, parameter[]]:
constant[
Get a list of identity providers choices for enterprise customer.
Return:
A list of choices of all identity providers, None if it can not get any available identity provider.
]
<ast.Try object at 0x7da18f09c190>
variable[first] assign[=] list[[<ast.Tuple object at 0x7da1b013f550>]]
if name[Registry] begin[:]
return[binary_operation[name[first] + <ast.ListComp object at 0x7da1b013fa90>]]
return[constant[None]]
|
keyword[def] identifier[get_idp_choices] ():
literal[string]
keyword[try] :
keyword[from] identifier[third_party_auth] . identifier[provider] keyword[import] identifier[Registry]
keyword[except] identifier[ImportError] keyword[as] identifier[exception] :
identifier[LOGGER] . identifier[warning] ( literal[string] )
identifier[LOGGER] . identifier[warning] ( identifier[exception] )
identifier[Registry] = keyword[None]
identifier[first] =[( literal[string] , literal[string] * literal[int] )]
keyword[if] identifier[Registry] :
keyword[return] identifier[first] +[( identifier[idp] . identifier[provider_id] , identifier[idp] . identifier[name] ) keyword[for] identifier[idp] keyword[in] identifier[Registry] . identifier[enabled] ()]
keyword[return] keyword[None]
|
def get_idp_choices():
"""
Get a list of identity providers choices for enterprise customer.
Return:
A list of choices of all identity providers, None if it can not get any available identity provider.
"""
try:
from third_party_auth.provider import Registry # pylint: disable=redefined-outer-name # depends on [control=['try'], data=[]]
except ImportError as exception:
LOGGER.warning('Could not import Registry from third_party_auth.provider')
LOGGER.warning(exception)
Registry = None # pylint: disable=redefined-outer-name # depends on [control=['except'], data=['exception']]
first = [('', '-' * 7)]
if Registry:
return first + [(idp.provider_id, idp.name) for idp in Registry.enabled()] # depends on [control=['if'], data=[]]
return None
|
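Two patterns are worth isolating here: the optional import that degrades Registry to None instead of crashing when third_party_auth is absent, and the Django-style blank first choice, where "-" * 7 renders as the placeholder "-------". A minimal sketch with a stubbed registry; the provider objects and their values are assumptions:

first = [("", "-" * 7)]  # Django convention for an unselected <option>

class _FakeIdP(object):
    # Stand-in for the objects Registry.enabled() would return.
    def __init__(self, provider_id, name):
        self.provider_id = provider_id
        self.name = name

enabled = [_FakeIdP('saml-corp', 'Corp IdP')]
choices = first + [(idp.provider_id, idp.name) for idp in enabled]
assert choices == [('', '-------'), ('saml-corp', 'Corp IdP')]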