text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
#!/usr/bin/env python
from core import geo, pmap
import numpy as np
from datetime import timedelta
import glob
import os
from netcdf import netcdf as nc
from cache import Cache, Loader
from helpers import short
import logging
class Heliosat2(object):
    """Heliosat-2 global-radiation estimation driver.

    Wires together a satellite image Loader, a TemporalCache of
    intermediate variables and an estimation strategy (``strategy_type``)
    to estimate global radiation from GOES imagery.
    """
    def __init__(self, config, strategy_type):
        # config must provide 'data' (list of image filenames) plus the
        # keys consumed by the caches ('tile_cut', 'temporal_cache',
        # 'product' -- see TemporalCache/OutputCache).
        self.config = config
        self.filenames = config['data']
        self.SAT_LON = -75.113
        # -75.3305 # longitude of sub-satellite point in degrees
        self.IMAGE_PER_HOUR = 2
        # GOES observed-albedo calibration constant (1.89544e-3); i0met is
        # the derived metric constant used by the strategy.
        self.GOES_OBSERVED_ALBEDO_CALIBRATION = 1.89544 * (10 ** (-3))
        self.i0met = np.pi / self.GOES_OBSERVED_ALBEDO_CALIBRATION
        self.strategy_type = strategy_type
        # NOTE: constructing the cache can call update_temporalcache()
        # back on this object for any image not cached yet, so
        # strategy_type must already be set at this point.
        self.cache = TemporalCache(self)
    def create_1px_dimensions(self, root):
        # Declare the 1x1x1 dimensions used by single-pixel variables
        # (e.g. 'slots') in a netCDF root.
        nc.getdim(root, 'xc_k', 1)
        nc.getdim(root, 'yc_k', 1)
        nc.getdim(root, 'time', 1)
    def create_slots(self, loader, cache, strategy):
        """Create and fill the per-image 'slots' variable in the cache."""
        self.create_1px_dimensions(cache)
        time = loader.time
        # Append a trailing axis so times broadcast as one value per image.
        shape = list(time.shape)
        shape.append(1)
        strategy.times = time.reshape(tuple(shape))
        strategy.slots = cache.getvar('slots', 'i1', ('time', 'yc_k', 'xc_k'))
        strategy.slots[:] = strategy.calculate_slots(self.IMAGE_PER_HOUR)
        nc.sync(cache)
    def create_variables(self, loader, cache, strategy):
        """Create every temporal-cache variable required by the strategy."""
        self.create_slots(loader, cache, strategy)
        self.create_temporal(loader, cache, strategy)
    def create_temporal(self, loader, cache, strategy):
        """Declare the intermediate temporal variables in the cache.

        ``source`` shapes each new variable after an existing one; the
        'f4' variants force 32-bit floats.
        """
        create_f = lambda name, source: cache.getvar(name, 'f4', source=source)
        create = lambda name, source: cache.getvar(name, source=source)
        strategy.declination = create_f('declination', strategy.slots)
        strategy.solarangle = create_f('solarangle', loader.ref_data)
        # Sync before deriving further variables from 'solarangle'.
        nc.sync(cache)
        strategy.solarelevation = create('solarelevation', strategy.solarangle)
        strategy.excentricity = create_f('excentricity', strategy.slots)
        strategy.gc = create('gc', strategy.solarangle)
        strategy.atmosphericalbedo = create('atmosphericalbedo',
                                            strategy.solarangle)
        strategy.t_sat = create('t_sat', loader.ref_lon)
        strategy.t_earth = create('t_earth', strategy.solarangle)
        strategy.cloudalbedo = create('cloudalbedo', strategy.solarangle)
        nc.sync(cache)
    def update_temporalcache(self, loader, cache):
        """(Re)compute the temporal cache with a fresh strategy instance."""
        logging.info("Updating temporal cache... ")
        self.strategy = self.strategy_type(self, loader, cache)
        self.strategy.update_temporalcache(loader, cache)
    def estimate_globalradiation(self, loader, cache):
        # Only runs when update_temporalcache() previously set a strategy;
        # a fully warm cache means there is nothing new to estimate.
        if hasattr(self, 'strategy'):
            logging.info("Obtaining the global radiation... ")
            output = OutputCache(self)
            self.strategy.estimate_globalradiation(loader, cache, output)
            output.dump()
            cache.dump()
    def run_with(self, loader):
        """Entry point: estimate global radiation for the loaded images."""
        self.estimate_globalradiation(loader, self.cache)
class AlgorithmCache(Cache):
    """Base class for the algorithm's on-disk caches.

    Subclasses must implement ``initialize_path(filenames)``, which is
    invoked here (template-method style) after the common attributes are
    set, so it may rely on ``self.algorithm`` and ``self.tile_config``.
    """
    def __init__(self, algorithm):
        super(AlgorithmCache, self).__init__()
        self.algorithm = algorithm
        # Tile slicing configuration shared by all Loader instances.
        self.tile_config = self.algorithm.config['tile_cut']
        self.filenames = self.algorithm.filenames
        self.initialize_path(self.filenames)
class TemporalCache(AlgorithmCache):
    """On-disk cache of the per-image temporal intermediate variables.

    Cached files live in ``config['temporal_cache']``, one ``.nc`` file per
    input image.  On construction, stale entries are removed and missing
    entries are computed through ``algorithm.update_temporalcache``.
    """
    def __init__(self, algorithm):
        super(TemporalCache, self).__init__(algorithm)
        self.update_cache(self.filenames)
        self.cache = Loader(pmap(self.get_cached_file, self.filenames),
                            tile_cut=self.tile_config)
        self.root = self.cache.root
    def initialize_path(self, filenames):
        # Directory holding the source images.
        self.path = '/'.join(filenames[0].split('/')[0:-1])
        self.temporal_path = self.algorithm.config['temporal_cache']
        # Map cached filename -> source filename, used by clean_cache().
        self.index = {self.get_cached_file(v): v for v in filenames}
        if not os.path.exists(self.temporal_path):
            os.makedirs(self.temporal_path)
    def get_cached_file(self, filename):
        """Return the cache path corresponding to a source image path."""
        return '%s/%s' % (self.temporal_path, short(filename, None, None))
    def update_cache(self, filenames):
        """Drop stale cache files, then compute any missing ones."""
        self.clean_cache(filenames)
        self.extend_cache(filenames)
    def extend_cache(self, filenames):
        """Compute cache files for images that do not have one yet."""
        cached_files = glob.glob('%s/*.nc' % self.temporal_path)
        # List comprehension instead of filter(): yields a real list on
        # both Python 2 and 3, keeping the truthiness test below valid
        # (a Python 3 filter object is always truthy).
        not_cached = [f for f in filenames
                      if self.get_cached_file(f) not in cached_files]
        if not_cached:
            # NOTE(review): tile config is passed positionally here but by
            # keyword (tile_cut=) elsewhere -- confirm Loader's second
            # positional parameter is tile_cut.
            loader = Loader(not_cached, self.tile_config)
            new_files = pmap(self.get_cached_file, not_cached)
            with nc.loader(new_files, dimensions=self.tile_config) as cache:
                self.algorithm.update_temporalcache(loader, cache)
            loader.dump()
    def clean_cache(self, exceptions):
        """Remove cached files whose source image is not in ``exceptions``."""
        cached_files = glob.glob('%s/*.nc' % self.temporal_path)
        # index.get(): a cache file left behind by a previous run (and so
        # absent from self.index) used to raise KeyError here; .get() maps
        # it to None, which is never in exceptions, so it is removed as
        # stale instead of crashing.
        old_cache = [f for f in cached_files
                     if self.index.get(f) not in exceptions]
        pmap(os.remove, old_cache)
    def getvar(self, *args, **kwargs):
        """Memoized nc.getvar() against the cache root."""
        name = args[0]
        if name not in self._attrs:
            tmp = list(args)
            tmp.insert(0, self.cache.root)
            self._attrs[name] = nc.getvar(*tmp, **kwargs)
        return self._attrs[name]
class OutputCache(AlgorithmCache):
    """Cache holding the algorithm's products (cloud index, radiation).

    Output files are created in ``config['product']``, one per input
    image, pre-populated with 'time', 'cloudindex' and 'globalradiation'
    variables shaped after the input data.
    """
    def __init__(self, algorithm):
        super(OutputCache, self).__init__(algorithm)
        self.output = Loader(pmap(self.get_output_file, self.filenames),
                             tile_cut=self.tile_config)
        self.root = self.output.root
        with nc.loader(self.filenames, dimensions=self.tile_config) as images:
            # NOTE(review): map() is lazy on Python 3, so this line only
            # creates the dimensions under Python 2 (eager map) -- confirm
            # the project still targets Python 2.
            map(algorithm.create_1px_dimensions, self.root.roots)
            self.root.getvar('time', source=images.getvar('time'))
            self.root.getvar('cloudindex',
                             'f4', source=images.getvar('data'))
            self.root.getvar('globalradiation',
                             'f4', source=images.getvar('data'))
    def initialize_path(self, filenames):
        # Directory holding the source images.
        self.path = '/'.join(filenames[0].split('/')[0:-1])
        self.output_path = self.algorithm.config['product']
        # Map output filename -> source filename.
        self.index = {self.get_output_file(v): v for v in filenames}
        if not os.path.exists(self.output_path):
            os.makedirs(self.output_path)
    def get_output_file(self, filename):
        """Return the product path corresponding to a source image path."""
        return '%s/%s' % (self.output_path, short(filename, None, None))
def run(**config):
    """Build the Heliosat2 algorithm and execute it over the configured
    data, dumping the loader when done."""
    algorithm = Heliosat2(config, geo.strategy)
    data_loader = Loader(config['data'], tile_cut=config['tile_cut'])
    algorithm.run_with(data_loader)
    data_loader.dump()
|
scottlittle/solar_radiation_model
|
models/heliosat.py
|
Python
|
mit
| 6,744
|
[
"NetCDF"
] |
cef22d1d1c45f76e03b5735602cf4526344495b7e88302b2e65c47f5472033cc
|
import numpy as np
import bayesianoracle as bo
import bayesianoracle.plot as boplotter
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection
from matplotlib import colors as cl
from matplotlib import gridspec, ticker
# Import function information
from function_data import *
execfile("function_data.py")
def plot_prior(bmao, model_ind, precision_alpha, precision_beta, bias_lambda):
    """Plot the prior of a single quadratic model (no data shown) and save
    it under StatisticalQuadraticModels1D_figures/.

    Parameters
    ----------
    bmao : Bayesian model averaging optimization process
    model_ind : index of the model to plot
    precision_alpha, precision_beta : precision prior parameters
        (used only to build the output filename)
    bias_lambda : bias prior parameter (used only in the output filename)

    Relies on the module-level globals ``x_range``, ``y_range``,
    ``num_points``, ``fun`` and ``dpi`` loaded from function_data.py.
    """
    # (Removed the redundant function-local matplotlib imports: plt is
    # imported at module level, and LineCollection/colors/gridspec were
    # never used in this function.)
    boplt = boplotter.Plotter1D(x_range=x_range, y_range=y_range, num_points=num_points)
    boplt.set_bma(bmao.bma)
    ### Plot the data and the models
    fig, ax = plt.subplots()
    # Plot the heatmap of probabilities, THEN the function, THEN mean line
    boplt.plot_model(ax, model_ind=model_ind, bool_dataless=True, color='k', linestyle='-')
    func_line = boplt.plot_fun(ax, fun)
    mean_line = boplt.plot_model_mean(ax, model_ind=model_ind, color=boplt.colorcycle[model_ind], linestyle='-')
    # Create legend
    legend = plt.legend([mean_line, func_line],
                        ['Quadratic Approximation', 'True Mean Function'],
                        loc='upper center', bbox_to_anchor=(0.5, 1.075), ncol=1, fancybox=True, shadow=False, scatterpoints=1)
    plt.setp(legend.get_texts(), fontsize=12)
    plt.savefig("StatisticalQuadraticModels1D_figures/"+str(model_ind)+"_"+str(precision_alpha)+"_"+str(precision_beta)+"_"+str(bias_lambda)+"_prior.png", dpi=dpi)
    plt.close(fig)
def plot_model(bmao, X, y_hist, model_ind, kernel_range, precision_alpha, precision_beta, bias_lambda):
    """Plot the posterior (biased) prediction of a single quadratic model
    together with the observed data, and save it under
    StatisticalQuadraticModels1D_figures/.

    Parameters
    ----------
    bmao : Bayesian model averaging optimization process
    X : previously traversed x values (plotted as data points)
    y_hist : observed function values matching X
    model_ind : index of the model to plot
    kernel_range : kernel range used for the biased prediction
    precision_alpha, precision_beta, bias_lambda : prior parameters
        (used only to build the output filename)

    Relies on the module-level globals ``x_range``, ``y_range``,
    ``num_points``, ``fun`` and ``dpi`` loaded from function_data.py.
    """
    # (Removed the redundant function-local matplotlib imports: plt is
    # imported at module level, and LineCollection/colors/gridspec were
    # never used in this function.)
    boplt = boplotter.Plotter1D(x_range=x_range, y_range=y_range, num_points=num_points)
    boplt.set_bma(bmao.bma)
    ### Plot the data and the models
    fig, ax = plt.subplots()
    # Plot the heatmap of probabilities, THEN the function, THEN the mean
    # line, THEN the data
    boplt.plot_model(ax, model_ind=model_ind, kernel_range=kernel_range, bool_dataless=False, color='k', linestyle='-')
    func_line = boplt.plot_fun(ax, fun)
    mean_line = boplt.plot_biased_model_mean(ax, model_ind=model_ind, kernel_range=kernel_range, color=boplt.colorcycle[model_ind], linestyle='-')
    scat = boplt.plot_data(ax, X, y_hist, bool_color_cycled=True)
    # Create legend
    legend = plt.legend([mean_line, func_line, scat],
                        ['Quadratic Approximation', 'True Mean Function', 'Data'],
                        loc='upper center', bbox_to_anchor=(0.5, 1.075), ncol=2, fancybox=True, shadow=False, scatterpoints=1)
    legend.legendHandles[2]._sizes = [30]
    plt.setp(legend.get_texts(), fontsize=12)
    plt.savefig("StatisticalQuadraticModels1D_figures/"+str(model_ind)+"_predictive_"+str(kernel_range)+"_"+str(precision_alpha)+"_"+str(precision_beta)+"_"+str(bias_lambda)+".png", dpi=dpi)
    plt.close(fig)
def plot_everything(bmao, X, y_hist, model_ind, sets, kernel_range):
    """Plot prior and posterior panels for every (alpha, beta, lambda)
    combination in ``sets``, stacked in a 2-column grid with a shared
    horizontal colorbar, and save the composite figure.

    Relies on module-level globals ``x_range``, ``y_range``,
    ``num_points``, ``fun`` and ``dpi`` from function_data.py.
    """
    n_subplot = 10
    fig = plt.figure(figsize=(8, 12), dpi=dpi)
    # NOTE(review): n_subplot/2 relies on Python 2 integer division; under
    # Python 3 this would pass a float to GridSpec.  The script appears to
    # target Python 2 (execfile/xrange are used elsewhere) -- confirm.
    gs = gridspec.GridSpec(n_subplot/2, 2, height_ratios=[10, 10, 10, 10, 1])
    # Make axes for plots
    ax = []
    for i in range(n_subplot-2):
        ax.append(plt.subplot(gs[i]))
    # Make axis for colorbar (last grid row spans both columns)
    ax_cb = plt.subplot(gs[-1,:])
    row_i = 0
    for (precision_alpha, precision_beta, bias_lambda) in sets:
        # Set prior parameters for this row of panels
        bmao.set_precision_prior_params(precision_alpha, precision_beta)
        bmao.set_bias_prior_params(bias_lambda)
        boplt = boplotter.Plotter1D(x_range=x_range, y_range=y_range, num_points=num_points)
        boplt.set_bma(bmao.bma)
        # Plot Prior (left column panel)
        heatmap = boplt.plot_model(ax[row_i], model_ind=model_ind, bool_dataless=True, color='k', linestyle='-', bool_colorbar=False, xlabel=None, upper=0.4)
        func_line = boplt.plot_fun(ax[row_i], fun, xlabel=None, color='black')
        mean_line = boplt.plot_model_mean(ax[row_i], model_ind=model_ind, color=boplt.colorcycle[model_ind], linestyle='-', xlabel=None)
        # Plot posterior (right column panel, with data)
        heatmap = boplt.plot_model(ax[row_i+1], model_ind=model_ind, kernel_range=kernel_range, bool_dataless=False, color='k', linestyle='-', bool_colorbar=False, xlabel=None, upper=0.4)
        func_line = boplt.plot_fun(ax[row_i+1], fun, xlabel=None, color='black')
        mean_line = boplt.plot_biased_model_mean(ax[row_i+1], model_ind=model_ind, kernel_range=kernel_range, color=boplt.colorcycle[model_ind], linestyle='-', xlabel=None)
        scat = boplt.plot_data(ax[row_i+1], X, y_hist, bool_color_cycled=False, xlabel=None, edgecolor='white')
        # Add right legend
        #h = plt.ylabel(r'$\alpha='+str(precision_alpha)+r'$'+"\n"+
        #               r'$\beta='+str(precision_beta)+r'$'+"\n"+
        #               r'$\lambda='+str(bias_lambda)+r'$',
        #               rotation=0,
        #               multialignment='left',
        #               horizontalalignment='left',
        #               verticalalignment='center')
        #ax[row_i+1].yaxis.set_label_position("right")
        h = plt.ylabel(r'$\alpha='+str(precision_alpha)+r'$ '+
                       r'$\beta='+str(precision_beta)+r'$ '+
                       r'$\lambda='+str(bias_lambda)+r'$',
                       rotation=270,
                       multialignment='center',
                       verticalalignment='center')
        #ax[row_i+1].yaxis.labelpad = 1.0
        ax[row_i+1].yaxis.set_label_coords(1.05, 0.55)
        # Custom colorbar on the shared axis (redrawn each iteration; the
        # last heatmap's colorbar is the one that remains)
        cbar = fig.colorbar(heatmap, cax=ax_cb, orientation='horizontal')
        cbar.set_label('probability')
        #tick_locator = ticker.MaxNLocator(nbins=11)
        #cbar.locator = tick_locator
        #cbar.update_ticks()
        row_i+=2
    # Figure legend (uses the line handles from the last iteration)
    legend = fig.legend([mean_line, func_line, scat],
                        ['Model Mean', 'True Mean', 'Data'],
                        loc='upper center', bbox_to_anchor=(0.5, 0.97), ncol=2, fancybox=True, shadow=False, scatterpoints=1)
    legend.legendHandles[2]._sizes = [30]
    plt.setp(legend.get_texts(), fontsize=12)
    # Set titles on the first row only
    ax[0].set_title(r'prior $y, p_{x}\left(y\mid \mathcal{M}, \gamma\right)$', y=1.09)
    ax[1].set_title(r'posterior $y, p_{x}\left(y\mid \mathcal{M},\mathcal{D}, \gamma\right)$', y=1.09)
    # Hide tick labels (y labels only on the left column)
    for k in range(len(ax)):
        ax[k].xaxis.set_ticklabels([])
        if k % 2 == 1:
            ax[k].yaxis.set_ticklabels([])
    # Set last x labels
    ax[6].set_xlabel(r'$x$')
    ax[7].set_xlabel(r'$x$')
    plt.tight_layout()
    plt.subplots_adjust(left=0.05, right=0.95, top=0.95, bottom=0.05, wspace=0.075, hspace=0.2)
    plt.savefig("StatisticalQuadraticModels1D_figures/"+str(model_ind)+"_"+str(kernel_range)+"_predictive_all.png", dpi=dpi)
# Build the optimizer under test.  constr1/constr2, bounding_box,
# X_complete, f_complete, grad_complete and Hess_complete come from
# function_data.py (pulled in via execfile at the top of the script).
bmao = bo.optimizer.QuadraticBMAOptimizer(ndim = 1,
                                          init_kernel_range=0.2,
                                          n_int=1,
                                          precision_beta = 1000.0,
                                          bias_lambda = 1.0,
                                          constraints = [constr1, constr2],
                                          bounding_box = bounding_box,
                                          bool_compact = True,
                                          kernel_type='Gaussian')
# Simulated sampling of the function.
X = None
y_hist = np.array([])
# Populate bmao with the precomputed observations, one column of
# X_complete at a time.
for k in xrange(X_complete.shape[1]):
    # Get next in sequence
    x_next = X_complete[:,k]
    x = x_next
    # X accumulates the visited points; hstack needs an existing array,
    # hence the special case for the first sample.
    if k == 0:
        X = np.array([x_next])
    else:
        X = np.hstack([X, np.array([x_next])])
    # Get y, grad, hess from precomputed lists
    f = f_complete[k]
    grad = grad_complete[k]
    Hess = Hess_complete[k]
    y_hist = np.append(y_hist, f)
    # Add observations to the bmao
    bmao.add_observation(x, f, grad, Hess)
# (alpha, beta, lambda) prior-parameter combinations to compare.
sets = [(2, 1000, 1),
        (1.1, 100, 1),
        (2, 20, 0.01),
        (51.5, 1000, 0.01)]
model_inds = [1]
kernel_range = 0.25
# Produce the composite figure for each selected model index.
for model_ind in model_inds:
    plot_everything(bmao, X, y_hist, model_ind, sets, kernel_range)
|
altaetran/bayesianoracle
|
tests/quadraticBayesianAveraging/paper_examples/StatisticalQuadraticModels1D.py
|
Python
|
apache-2.0
| 9,162
|
[
"Gaussian"
] |
da8f934861a5c4ea131128b95281e3e89e57f2cf692a86a3f30d17ec6af60550
|
########################################################################
# $HeadURL$
########################################################################
""" X509Certificate is a class for managing X509 certificates alone
"""
__RCSID__ = "$Id$"
import GSI
import os
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities import Time
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
# Not Used
# def _proxyExtensionList( ):
# return [ GSI.crypto.X509Extension( 'keyUsage', 'critical, digitalSignature, keyEncipherment, dataEncipherment' ) ]
class X509Certificate:
  """ Wrapper around a GSI X509 certificate object.

      Accessors return S_OK( value )/S_ERROR and require a certificate to
      have been loaded first (constructor, load*, or setCertificate).
  """

  def __init__( self, x509Obj = None ):
    self.__valid = False
    if x509Obj:
      self.__certObj = x509Obj
      self.__valid = True

  def load( self, certificate ):
    """ Load a x509 certificate either from a file or from a string
    """
    if os.path.exists( certificate ):
      return self.loadFromFile( certificate )
    else:
      return self.loadFromString( certificate )

  def loadFromFile( self, certLocation ):
    """
    Load a x509 cert from a pem file
    Return : S_OK / S_ERROR
    """
    try:
      # open() instead of the deprecated file() builtin; the finally block
      # closes the descriptor even when read() fails (previously it leaked
      # in that case).
      fd = open( certLocation )
      try:
        pemData = fd.read()
      finally:
        fd.close()
    except IOError:
      return S_ERROR( "Can't open %s file" % certLocation )
    return self.loadFromString( pemData )

  def loadFromString( self, pemData ):
    """
    Load a x509 cert from a string containing the pem data
    Return : S_OK / S_ERROR
    """
    try:
      self.__certObj = GSI.crypto.load_certificate( GSI.crypto.FILETYPE_PEM, pemData )
    except Exception as e:
      # 'except ... as' form works on Python 2.6+ and Python 3
      return S_ERROR( "Can't load pem data: %s" % str( e ) )
    self.__valid = True
    return S_OK()

  def setCertificate( self, x509Obj ):
    """ Adopt an already-built GSI X509 object.
        Return : S_OK / S_ERROR
    """
    # isinstance instead of an exact type comparison (also accepts
    # subclasses of X509Type)
    if not isinstance( x509Obj, GSI.crypto.X509Type ):
      return S_ERROR( "Object %s has to be of type X509" % str( x509Obj ) )
    self.__certObj = x509Obj
    self.__valid = True
    return S_OK()

  def hasExpired( self ):
    """
    Check if a certificate file/proxy is still valid
    Return: S_OK( True/False )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.has_expired() )

  def getNotAfterDate( self ):
    """
    Get not after date of a certificate
    Return: S_OK( datetime )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_not_after() )

  def getNotBeforeDate( self ):
    """
    Get not before date of a certificate
    Return: S_OK( datetime )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_not_before() )

  def getSubjectDN( self ):
    """
    Get subject DN
    Return: S_OK( string )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_subject().one_line() )

  def getIssuerDN( self ):
    """
    Get issuer DN
    Return: S_OK( string )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_issuer().one_line() )

  def getSubjectNameObject( self ):
    """
    Get subject name object
    Return: S_OK( X509Name )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_subject() )

  def getIssuerNameObject( self ):
    """
    Get issuer name object
    Return: S_OK( X509Name )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_issuer() )

  def getPublicKey( self ):
    """
    Get the public key of the certificate
    Return: S_OK( pkey )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_pubkey() )

  def getSerialNumber( self ):
    """
    Get certificate serial number
    Return: S_OK( serial )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    return S_OK( self.__certObj.get_serial_number() )

  def getDIRACGroup( self, ignoreDefault = False ):
    """
    Get the dirac group if present.  Falls back to the default group for
    the issuer DN unless ignoreDefault is True.
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    extList = self.__certObj.get_extensions()
    for ext in extList:
      if ext.get_sn() == "diracGroup":
        return S_OK( ext.get_value() )
    if ignoreDefault:
      return S_OK( False )
    result = self.getIssuerDN()
    if not result[ 'OK' ]:
      return result
    return Registry.findDefaultGroupForDN( result['Value'] )

  def hasVOMSExtensions( self ):
    """
    Has voms extensions
    Return: S_OK( True/False )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    extList = self.__certObj.get_extensions()
    for ext in extList:
      if ext.get_sn() == "vomsExtensions":
        return S_OK( True )
    return S_OK( False )

  def generateProxyRequest( self, bitStrength = 1024, limited = False ):
    """
    Generate a proxy request
    Return S_OK( X509Request ) / S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    if not limited:
      # A cert whose last subject entry is CN=limited proxy can only spawn
      # limited proxies
      subj = self.__certObj.get_subject()
      lastEntry = subj.get_entry( subj.num_entries() - 1 )
      if lastEntry[0] == 'CN' and lastEntry[1] == "limited proxy":
        limited = True
    # Local import to avoid a circular dependency with X509Request
    from DIRAC.Core.Security.X509Request import X509Request
    req = X509Request()
    req.generateProxyRequest( bitStrength = bitStrength, limited = limited )
    return S_OK( req )

  def getRemainingSecs( self ):
    """
    Get remaining lifetime in secs
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    notAfter = self.__certObj.get_not_after()
    remaining = notAfter - Time.dateTime()
    return S_OK( max( 0, remaining.days * 86400 + remaining.seconds ) )

  def getExtensions( self ):
    """
    Get a decoded list of extensions
    Return: S_OK( sorted [ ( shortName, value ) ] )/S_ERROR
    """
    if not self.__valid:
      return S_ERROR( "No certificate loaded" )
    extList = []
    for ext in self.__certObj.get_extensions():
      sn = ext.get_sn()
      try:
        value = ext.get_value()
      except Exception:
        value = "Cannot decode value"
      extList.append( ( sn, value ) )
    return S_OK( sorted( extList ) )
|
Sbalbp/DIRAC
|
Core/Security/X509Certificate.py
|
Python
|
gpl-3.0
| 6,252
|
[
"DIRAC"
] |
3efcdfafe5f500b27c43fae5729b71eba460bf51c4435e57107c08cf8ff3de70
|
#!/usr/bin/env python
"""
Tests for writing and reading UGRID compliant netCDF.
"""
from __future__ import (absolute_import, division, print_function)
import os
import numpy as np
import pytest
from gridded.pyugrid.ugrid import UGrid
from gridded.pyugrid.uvar import UVar
from .utilities import chdir, two_triangles
pytestmark = pytest.mark.skipif(True, reason="gridded does not support UVars anymore")
test_files = os.path.join(os.path.dirname(__file__), 'files')
def test_with_faces(two_triangles):
    """
    Test with faces, edges, but no `face_coordinates` or `edge_coordinates`.
    """
    original = two_triangles
    filename = '2_triangles.nc'
    with chdir(test_files):
        original.save_as_netcdf(filename)
        reloaded = UGrid.from_ncfile(filename)
        os.remove(filename)
    # Round-trip must preserve the three array attributes.
    for attr in ('nodes', 'faces', 'edges'):
        assert np.array_equal(getattr(original, attr), getattr(reloaded, attr))
def test_without_faces(two_triangles):
    """Round-trip a grid whose faces were removed; faces must stay None."""
    original = two_triangles
    del original.faces
    assert original.faces is None
    filename = '2_triangles.nc'
    with chdir(test_files):
        original.save_as_netcdf(filename)
        reloaded = UGrid.from_ncfile(filename)
        os.remove(filename)
    assert reloaded.faces is None
    assert np.array_equal(original.faces, reloaded.faces)
    assert np.array_equal(original.edges, reloaded.edges)
def test_with_just_nodes_and_depths(two_triangles):
    # Round-trip a grid stripped down to nodes plus one node-located data
    # variable ('depth'); the data is only read back because
    # load_data=True is passed to from_ncfile.
    expected = two_triangles
    del expected.faces
    del expected.edges
    depth = UVar('depth',
                 'node',
                 np.array([1.0, 2.0, 3.0, 4.0]),
                 {'units': 'm',
                  'positive': 'down',
                  'standard_name': 'sea_floor_depth_below_geoid'})
    expected.add_data(depth)
    fname = '2_triangles_depth.nc'
    with chdir(test_files):
        expected.save_as_netcdf(fname)
        grid = UGrid.from_ncfile(fname, load_data=True)
        os.remove(fname)
    assert grid.faces is None
    assert grid.edges is None
    assert np.array_equal(expected.nodes, grid.nodes)
    assert np.array_equal(expected.data['depth'].data, grid.data['depth'].data)
    assert expected.data['depth'].attributes == grid.data['depth'].attributes
if __name__ == "__main__":
    # The test functions take the `two_triangles` pytest fixture as a
    # parameter, so calling them directly (as this block previously did
    # with no arguments) raised TypeError.  Delegate to pytest so the
    # fixtures are supplied; pytest is already a dependency of this module.
    import pytest
    pytest.main([__file__])
|
NOAA-ORR-ERD/gridded
|
gridded/tests/test_ugrid/test_write_read.py
|
Python
|
unlicense
| 2,320
|
[
"NetCDF"
] |
362535126de50cb7f96f36e7a3013b44ffbcf6d369b256e9bb74124da4d25901
|
# coding=utf-8
# constants.py: Kort's Spellcrafting Calculator
#
# See http://www.github.com/artomason/KortsCalculator/ for updates
#
# See NOTICE.txt for copyrights and grant of license
from Character import *
from tuple2 import *
from dict2 import *
import sys
__all__ = [
'ScVersion', 'GemLists', 'DropLists', 'CraftedLists', 'TypeList', 'EffectTypeList', 'DropTypeList',
'CraftedTypeList', 'ValuesLists', 'CraftedValuesLists', 'QualityValues', 'ImbuePts', 'OCStartPercentages',
'ItemQualOCModifiers', 'FileExt', 'Caps', 'HighCapBonusList', 'MythicalCapBonusList', 'BodyHitOdds', 'GemTables',
'GemDusts', 'GemLiquids', 'GemSubName', 'MaterialsOrder', 'GemNames', 'MaterialGems', 'GemCosts', 'RemakeCosts',
'EffectTypeNames', 'ProcItemNames', 'StableItemNames', 'EffectMetal', 'FixTypeTable', 'FixEffectsTable',
'HotkeyGems', 'ImbueMultipliers', 'ShieldTypes', 'TabList', 'PieceTabList', 'JewelTabList', 'ArmorTabList',
'WeaponTabList', 'FocusTabList',
]
# NOTE(review): "Calulator" looks like a typo for "Calculator", but this is
# a user-visible version string -- confirm before changing it.
ScVersion = "Kort's Spellcrafting Calulator 3.0.3 (BETA)"
# Bonus categories; t2 is the project's tuple wrapper.
TypeList = t2(( # XFERED
    'Unused',
    'Stat',
    'Resist',
    'Focus',
    'Skill',
))
# 5TH ALCHEMY IMBUE SLOT
EffectTypeList = t2((
    'Offensive Effect',
    'Reactive Effect',
    'Charged Effect',
))
# DropTypeList must be built from the three-entry EffectTypeList above,
# *before* EffectTypeList is rebound below with 'Unused' prepended.
DropTypeList = t2( # XFERED
    TypeList + ('Cap Increase', 'Mythical Bonus', 'PvE Bonus', 'Other Bonus',) +
    EffectTypeList + ('Other Effect',)
)
EffectTypeList = t2(('Unused',) + EffectTypeList)
# Shared empty placeholders (d2 is the project's dict wrapper).
unusedTable = d2({})
unusedList = t2()
unusedValues = t2()
# Liquid required to craft a gem, keyed by the gem's prefix word(s); values
# are tuples when several liquids are needed.
# (A duplicate 'Vapor' key carrying the identical value was removed:
# duplicate keys in a dict literal silently collapse to the last one.)
GemLiquids = d2({
    'Fiery': 'Draconic Fire',
    'Earthen': 'Treant Blood',
    'Vapor': 'Swamp Fog',
    'Airy': 'Air Elemental Essence',
    'Heated': 'Heat From an Unearthly Pyre',
    'Icy': 'Frost From a Wasteland',
    'Watery': 'Leviathan Blood',
    'Dusty': 'Undead Ash and Holy Water',
    'Fire': 'Draconic Fire',
    'Earth': 'Treant Blood',
    'Air': 'Air Elemental Essence',
    'Heat': 'Heat From an Unearthly Pyre',
    'Ice': 'Frost From a Wasteland',
    'Water': 'Leviathan Blood',
    'Dust': 'Undead Ash and Holy Water',
    'Ashen': 'Undead Ash and Holy Water',
    'Vacuous': 'Swamp Fog',
    'Salt Crusted': 'Mystic Energy',
    'Steaming Spell': 'Swamp Fog',
    'Steaming Nature': 'Swamp Fog',
    'Steaming Fervor': 'Heat From an Unearthly Pyre',
    'Oozing': 'Treant Blood',
    'Mineral Encrusted': 'Heat From an Unearthly Pyre',
    'Lightning Charged': 'Leviathan Blood',
    'Molten Magma': 'Leviathan Blood',
    'Light': 'Sun Light',
    'Blood': 'Giant Blood',
    'Mystical': 'Mystic Energy',
    'Mystic': 'Mystic Energy',
    'Brilliant': ('Draconic Fire', 'Mystic Energy', 'Treant Blood'),
    'Finesse': ('Draconic Fire', 'Mystic Energy', 'Treant Blood'),
    'Ethereal Spell': 'Swamp Fog',
    'Phantasmal Spell': 'Leviathan Blood',
    'Spectral Spell': 'Draconic Fire',
    'Ethereal Arcane': 'Leviathan Blood',
    'Phantasmal Arcane': 'Draconic Fire',
    'Spectral Arcane': 'Air Elemental Essence',
    'Aberrant': 'Treant Blood',
    'Embracing': 'Frost From a Wasteland',
    'Shadowy': 'Swamp Fog',
    'Blighted Primal': 'Air Elemental Essence',
    'Blighted Rune': 'Undead Ash and Holy Water',
    'Valiant': 'Swamp Fog',
    'Unholy': 'Air Elemental Essence',
    'Glacial': 'Frost From a Wasteland',
    'Cinder': 'Draconic Fire',
    'Radiant': 'Sun Light',
    'Magnetic': 'Mystic Energy',
    'Clout': 'Giant Blood',
})
# Dust required to craft each gem base type.
GemDusts = d2({
    'Essence Jewel': 'Essence of Life',
    'Shielding Jewel': 'Ground Draconic Scales',
    'Spell Stone': 'Ground Draconic Scales',
    'Sigil': 'Ground Draconic Scales',
    'Rune': 'Ground Draconic Scales',
    'Chaos Rune': 'Soot From Niflheim',
    'Battle Jewel': 'Bloodied Battlefield Dirt',
    'War Rune': 'Ground Giant Bone',
    'Primal Rune': 'Ground Vendo Bone',
    'Evocation Sigil': 'Ground Cave Crystal',
    'Fervor Sigil': 'Ground Blessed Undead Bone',
    'War Sigil': 'Ground Caer Stone',
    'Nature Spell Stone': 'Fairy Dust',
    'War Spell Stone': 'Unseelie Dust',
    'Arcane Spell Stone': 'Other Worldly Dust',
})
# Gem base used for all stat imbues.  Renamed from ``type`` to stop
# shadowing the builtin of the same name (module-internal only; ``type``
# is not exported through __all__).
gem_type = 'Essence Jewel'
statTableOrdered = (
    ('Strength', 'Fiery',),
    ('Constitution', 'Earthen',),
    ('Dexterity', 'Vapor',),
    ('Quickness', 'Airy',),
    ('Intelligence', 'Dusty',),
    ('Piety', 'Watery',),
    ('Charisma', 'Icy',),
    ('Empathy', 'Heated',),
    ('Power', 'Mystical',),
    ('Hits', 'Blood',),
)
statTable = dict(statTableOrdered)
# Expand each stat entry to (liquid prefix, gem, dust, liquid).
for (key, val) in list(statTable.items()):
    statTable[key] = (val, gem_type, GemDusts[gem_type], GemLiquids[val],)
statTable = d2(statTable)
statList = t2([x[0] for x in statTableOrdered])
del statTableOrdered
statValues = t2(('2', '5', '8', '11', '14', '17', '20', '23', '26', '29',))
hitsValues = t2(('4', '12', '20', '28', '36', '44', '52', '60', '68', '76',))
powerValues = t2(('1', '2', '3', '5', '7', '9', '11', '13', '15', '17'))
# DUPLICATE 'statList', ADD NON-CRAFTABLE 'Acuity' STAT
dropStatList = t2(statList + ('Acuity',))
dropStatTable = dict().fromkeys(dropStatList)
resistTableOrdered = (
    ('Body', 'Dusty',),
    ('Cold', 'Icy',),
    ('Heat', 'Heated',),
    ('Energy', 'Light',),
    ('Matter', 'Earthen',),
    ('Spirit', 'Vapor',),
    ('Crush', 'Fiery',),
    ('Thrust', 'Airy',),
    ('Slash', 'Watery',),
)
resistTable = dict(resistTableOrdered)
# Gem base used for all resist imbues.  Renamed from ``type`` to stop
# shadowing the builtin of the same name (module-internal only).
gem_type = 'Shielding Jewel'
# Expand each resist entry to (liquid prefix, gem, dust, liquid).
for (key, val) in list(resistTable.items()):
    resistTable[key] = (val, gem_type, GemDusts[gem_type], GemLiquids[val])
resistTable = d2(resistTable)
resistList = t2([x[0] for x in resistTableOrdered])
resistValues = t2(('1', '2', '3', '5', '7', '9', '11', '13', '15', '17',))
# DUPLICATE 'resistList', ADD NON-CRAFTABLE 'Essence' RESIST
dropResistList = t2(resistList + ('Essence',))
dropResistTable = dict().fromkeys(dropResistList)
del resistTableOrdered
# Spell-focus gem recipes per realm: spell line -> (gem prefix, gem type).
focusTable = {
    'Albion': {
        'All Spell Lines': ('Brilliant', 'Sigil',),
        'Body Magic': ('Heat', 'Sigil',),
        'Cold Magic': ('Ice', 'Sigil',),
        'Death Servant': ('Ashen', 'Sigil',),
        'Deathsight': ('Vacuous', 'Sigil',),
        'Earth Magic': ('Earth', 'Sigil',),
        'Fire Magic': ('Fire', 'Sigil',),
        'Matter Magic': ('Dust', 'Sigil',),
        'Mind Magic': ('Water', 'Sigil',),
        'Painworking': ('Salt Crusted', 'Sigil',),
        'Spirit Magic': ('Vapor', 'Sigil',),
        'Wind Magic': ('Air', 'Sigil',),
    },
    'Hibernia': {
        'All Spell Lines': ('Brilliant', 'Spell Stone',),
        'Arboreal Path': ('Steaming', 'Spell Stone',),
        'Creeping Path': ('Oozing', 'Spell Stone',),
        'Enchantments': ('Vapor', 'Spell Stone',),
        'Ethereal Shriek': ('Ethereal', 'Spell Stone',),
        'Light': ('Fire', 'Spell Stone',),
        'Mana': ('Water', 'Spell Stone',),
        'Mentalism': ('Earth', 'Spell Stone',),
        'Phantasmal Wail': ('Phantasmal', 'Spell Stone',),
        'Spectral Guard': ('Spectral', 'Spell Stone',),
        'Verdant Path': ('Mineral Encrusted', 'Spell Stone',),
        'Void': ('Ice', 'Spell Stone',),
    },
    'Midgard': {
        'All Spell Lines': ('Brilliant', 'Rune',),
        'Bone Army': ('Ashen', 'Rune',),
        'Cursing': ('Blighted', 'Rune',),
        'Darkness': ('Ice', 'Rune',),
        'Runecarving': ('Heat', 'Rune',),
        'Summoning': ('Vapor', 'Rune',),
        'Suppression': ('Dust', 'Rune',),
    },
    'All': {}}
# Expand each (prefix, type) pair to the full 4-tuple recipe
# (prefix, type, dust, liquid).  Some liquids are keyed on the prefix
# alone, others on "<prefix> <first word of type>" (e.g. 'Blighted Rune').
for realm in Realms:
    for (key, val) in list(focusTable[realm].items()):
        if val[0] in GemLiquids:
            liquid = GemLiquids[val[0]]
        else:
            liquid = GemLiquids[val[0] + " " + val[1].split()[0]]
        focusTable[realm][key] = (val[0], val[1], GemDusts[val[1]], liquid,)
    focusTable[realm] = d2(focusTable[realm])
    # 'All' accumulates every realm's entries.
    focusTable['All'].update(focusTable[realm])
focusTable['All'] = d2(focusTable['All'])
focusTable = d2(focusTable)
# Sorted focus-name list per realm, for UI menus.
focusList = {}
for realm in list(focusTable.keys()):
    focusList[realm] = list(focusTable[realm].keys())
    focusList[realm].sort()
    focusList[realm] = t2(focusList[realm])
focusList = d2(focusList)
focusValues = t2(('5', '10', '15', '20', '25', '30', '35', '40', '45', '50',))
# Skill-bonus gem recipes per realm: skill name -> (gem prefix, gem type).
# Expanded below into full 4-tuple recipes, exactly like focusTable.
skillTable = {
    'Albion': {
        'All Magic Skills': ('Finesse', 'Fervor Sigil',),
        'All Melee Weapon Skills': ('Finesse', 'War Sigil',),
        'Archery': ('Airy', 'War Sigil',),
        'Aura Manipulation': ('Radiant', 'Fervor Sigil',),
        'Body Magic': ('Heated', 'Evocation Sigil',),
        'Chants': ('Earthen', 'Fervor Sigil',),
        'Cold Magic': ('Icy', 'Evocation Sigil',),
        'Critical Strike': ('Heated', 'Battle Jewel',),
        'Crossbow': ('Vapor', 'War Sigil',),
        'Crush': ('Fiery', 'War Sigil',),
        'Death Servant': ('Ashen', 'Fervor Sigil',),
        'Deathsight': ('Vacuous', 'Fervor Sigil',),
        'Dual Wield': ('Icy', 'War Sigil',),
        'Earth Magic': ('Earthen', 'Evocation Sigil',),
        'Enhancement': ('Airy', 'Fervor Sigil',),
        'Envenom': ('Dusty', 'Battle Jewel',),
        'Flexible': ('Molten Magma', 'War Sigil',),
        'Fire Magic': ('Fiery', 'Evocation Sigil',),
        'Fist Wraps': ('Glacial', 'War Sigil',),
        'Instruments': ('Vapor', 'Fervor Sigil',),
        'Magnetism': ('Magnetic', 'Fervor Sigil',),
        'Matter Magic': ('Dusty', 'Evocation Sigil',),
        'Mauler Staff': ('Cinder', 'War Sigil',),
        'Mind Magic': ('Watery', 'Evocation Sigil',),
        'Painworking': ('Salt Crusted', 'Fervor Sigil',),
        'Parry': ('Vapor', 'Battle Jewel',),
        'Polearm': ('Earthen', 'War Sigil',),
        'Power Strikes': ('Clout', 'Fervor Sigil',),
        'Rejuvenation': ('Watery', 'Fervor Sigil',),
        'Shield': ('Fiery', 'Battle Jewel',),
        'Slash': ('Watery', 'War Sigil',),
        'Smite': ('Fiery', 'Fervor Sigil',),
        'Soulrending': ('Steaming', 'Fervor Sigil',),
        'Spirit Magic': ('Vapor', 'Evocation Sigil',),
        'Staff': ('Earthen', 'Battle Jewel',),
        'Stealth': ('Airy', 'Battle Jewel',),
        'Thrust': ('Dusty', 'War Sigil',),
        'Two Handed': ('Heated', 'War Sigil',),
        'Wind Magic': ('Airy', 'Evocation Sigil',),
    },
    'Hibernia': {
        'All Magic Skills': ('Finesse', 'Nature Spell Stone',),
        'All Melee Weapon Skills': ('Finesse', 'War Spell Stone',),
        'Arboreal Path': ('Steaming', 'Nature Spell Stone',),
        'Archery': ('Airy', 'War Spell Stone',),
        'Aura Manipulation': ('Radiant', 'Nature Spell Stone'),
        'Blades': ('Watery', 'War Spell Stone',),
        'Blunt': ('Fiery', 'War Spell Stone',),
        'Celtic Dual': ('Icy', 'War Spell Stone',),
        'Celtic Spear': ('Earthen', 'War Spell Stone',),
        'Creeping Path': ('Oozing', 'Nature Spell Stone',),
        'Critical Strike': ('Heated', 'Battle Jewel',),
        'Dementia': ('Aberrant', 'Arcane Spell Stone',),
        'Enchantments': ('Vapor', 'Arcane Spell Stone',),
        'Envenom': ('Dusty', 'Battle Jewel',),
        'Ethereal Shriek': ('Ethereal', 'Arcane Spell Stone',),
        'Fist Wraps': ('Glacial', 'War Spell Stone'),
        'Large Weaponry': ('Heated', 'War Spell Stone',),
        'Light': ('Fiery', 'Arcane Spell Stone',),
        'Magnetism': ('Magnetic', 'Nature Spell Stone'),
        'Mana': ('Watery', 'Arcane Spell Stone',),
        'Mauler Staff': ('Cinder', 'War Spell Stone'),
        'Mentalism': ('Earthen', 'Arcane Spell Stone',),
        'Music': ('Airy', 'Nature Spell Stone',),
        'Nature': ('Earthen', 'Nature Spell Stone',),
        'Nurture': ('Fiery', 'Nature Spell Stone',),
        'Parry': ('Vapor', 'Battle Jewel',),
        'Phantasmal Wail': ('Phantasmal', 'Arcane Spell Stone',),
        'Piercing': ('Dusty', 'War Spell Stone',),
        'Power Strikes': ('Clout', 'Nature Spell Stone'),
        'Regrowth': ('Watery', 'Nature Spell Stone',),
        'Scythe': ('Light', 'War Spell Stone',),
        'Shadow Mastery': ('Shadowy', 'Arcane Spell Stone',),
        'Shield': ('Fiery', 'Battle Jewel',),
        'Spectral Guard': ('Spectral', 'Arcane Spell Stone',),
        'Staff': ('Earthen', 'Battle Jewel',),
        'Stealth': ('Airy', 'Battle Jewel',),
        'Valor': ('Airy', 'Arcane Spell Stone',),
        'Vampiiric Embrace': ('Embracing', 'Arcane Spell Stone',),
        'Verdant Path': ('Mineral Encrusted', 'Nature Spell Stone',),
        'Void': ('Icy', 'Arcane Spell Stone',),
    },
    'Midgard': {
        'All Magic Skills': ('Finesse', 'Primal Rune',),
        'All Melee Weapon Skills': ('Finesse', 'War Rune',),
        'Archery': ('Airy', 'War Rune',),
        'Augmentation': ('Airy', 'Chaos Rune',),
        'Aura Manipulation': ('Radiant', 'Primal Rune',),
        'Axe': ('Earthen', 'War Rune',),
        'Battlesongs': ('Airy', 'Primal Rune',),
        'Beastcraft': ('Earthen', 'Primal Rune',),
        'Bone Army': ('Ashen', 'Primal Rune',),
        'Cave Magic': ('Fiery', 'Chaos Rune',),
        'Critical Strike': ('Heated', 'Battle Jewel',),
        'Cursing': ('Blighted', 'Primal Rune',),
        'Darkness': ('Icy', 'Chaos Rune',),
        'Envenom': ('Dusty', 'Battle Jewel',),
        'Fist Wraps': ('Glacial', 'War Rune',),
        'Hammer': ('Fiery', 'War Rune',),
        'Hand To Hand': ('Lightning Charged', 'War Rune',),
        'Hexing': ('Unholy', 'Primal Rune',),
        'Left Axe': ('Icy', 'War Rune',),
        'Magnetism': ('Magnetic', 'Primal Rune',),
        'Mauler Staff': ('Cinder', 'War Rune',),
        'Mending': ('Watery', 'Chaos Rune',),
        'Odin\'s Will': ('Valiant', 'Primal Rune',),
        'Parry': ('Vapor', 'Battle Jewel',),
        'Power Strikes': ('Clout', 'Primal Rune',),
        'Runecarving': ('Heated', 'Chaos Rune',),
        'Shield': ('Fiery', 'Battle Jewel',),
        'Spear': ('Heated', 'War Rune',),
        'Staff': ('Earthen', 'Battle Jewel',),
        'Stealth': ('Airy', 'Battle Jewel',),
        'Stormcalling': ('Fiery', 'Primal Rune',),
        'Summoning': ('Vapor', 'Chaos Rune',),
        'Suppression': ('Dusty', 'Chaos Rune',),
        'Sword': ('Watery', 'War Rune',),
        'Thrown Weapons': ('Vapor', 'War Rune',),
    }, 'All': {}}
# Expand to full recipes, merging everything into 'All' (same liquid
# lookup fallback as focusTable above).
for realm in Realms:
    for (key, val) in list(skillTable[realm].items()):
        if val[0] in GemLiquids:
            liquid = GemLiquids[val[0]]
        else:
            liquid = GemLiquids[val[0] + " " + val[1].split()[0]]
        skillTable[realm][key] = (val[0], val[1], GemDusts[val[1]], liquid,)
    skillTable[realm] = d2(skillTable[realm])
    skillTable['All'].update(skillTable[realm])
skillTable['All'] = d2(skillTable['All'])
skillTable = d2(skillTable)
# Sorted per-realm skill lists; the drop list additionally gets the
# combined archery/dual-wield entries and non-craftable skills.
skillList = {}
dropSkillList = {}
for realm in list(skillTable.keys()):
    skills = list(skillTable[realm].keys())
    skills.sort()
    # t2() appears to snapshot the list, so the inserts below only
    # affect dropSkillList -- presumably t2 copies; confirm.
    skillList[realm] = t2(skills)
    skills.insert(2, 'All Archery Skills')
    skills.insert(3, 'All Dual Wield Skills')
    if realm == 'Midgard':  # ADD NON-CRAFTABLE 'Witchcraft' SKILL
        skills.append('Witchcraft')
    dropSkillList[realm] = t2(skills)
skillList = d2(skillList)
dropSkillList = d2(dropSkillList)
skillValues = t2(('1', '2', '3', '4', '5', '6', '7', '8',))
# Stats whose item caps can be raised on drops, plus Fatigue.
capIncreaseList = t2(dropStatList + (
    'Fatigue',
))
# Miscellaneous ("other") bonuses found on drop items.
otherBonusList = t2((  # XFERED
    '% Power Pool',
    'AF',
    'Archery Damage',
    'Archery Range',
    'Archery Speed',
    'Casting Speed',
    'Duration of Spells',
    'Fatigue',
    'Healing Effectiveness',
    'Melee Damage',
    'Melee Combat Speed',
    'Spell Damage',
    'Spell Piercing',
    'Spell Range',
    'Stat Buff Effectiveness',
    'Stat Debuff Effectiveness',
    'Style Damage',
    'Unique Bonus...',
))
# Bonuses available only in the Mythical item slot.
mythicalBonusList = t2((
    'Coin',
    'Bounty Points',
    'Realm Points',
    'Crowd Control Reduction',
    'Endurance Regen',
    'Health Regen',
    'Power Regen',
    'Safe Fall',
    'Seige Speed',  # sic -- presumably matches the in-game spelling; verify before changing
    'Spell Increase',
    'Physical Defense',
    'DPS',
    'Block',
    'Evade',
    'Parry',
))
# PvE-only bonuses found on drop items.
pveBonusList = t2((
    'Arrow Recovery',
    'Bladeturn Reinforcement',
    'Block',
    'Concentration',
    'Damage Reduction',
    'Death Experience Loss Reduction',
    'Defensive',
    'Evade',
    'Negative Effect Duration Reduction',
    'Parry',
    'Piece Ablative',
    'Reactionary Style Damage',
    'Spell Power Cost Reduction',
    'Style Cost Reduction',
    'To Hit',
    'Unique PvE Bonus...',
))
# THE TIER (DROPPED), 10, 7, 5 REPEAT IS FOR NEWER TINCTURES THAT
# JUMP FROM LEVEL 25 TO 35 TO 47. THE THIRD ELT OF THE TUPLES
# IN THE EFFECTS TABLE IS AN INDEX TO THE METALS (OFFSET BY THE
# SELECTED EFFECT). DROP TINCTURES HAVE NO METAL, SO THEY HAVE
# BEEN OMITTED FROM THESE LISTS.
# Tincture metal names per tier; Hibernia uses mineral/gem names, the
# other realms share the common metal list.
metalCommon = (
    "",
    "Arcanium",
    "Netherium",
    "Asterite",
    "Adamantium",
    "Mithril",
    "Fine Alloy",
    "Alloy",
    "",
    "Arcanium",
    "Adamantium",
    "Fine Alloy",
)
EffectMetal = d2({
    'All':
        metalCommon,
    'Albion':
        metalCommon,
    'Hibernia': (
        "",
        "Arcanite",
        "Netherite",
        "Diamond",
        "Sapphire",
        "Carbide",
        "Cobolt",
        "Dolomite",
        "",
        "Arcanite",
        "Sapphire",
        "Cobolt",
    ),
    'Midgard': metalCommon,
})
# Direct-damage tincture strengths and the item level each requires.
ddEffDmgTable = t2(("95", "86", "77", "68", "59", "50", "41",))
ddEffReqLevel = ("47", "43", "40", "35", "30", "25", "20",)
# Effect tables: effect name -> (value list, required-level list, metal
# index).  Per the note above, the metal index offsets into the
# EffectMetal lists relative to the selected effect strength.
offensiveEffectValues = d2({
    'Direct Damage (Fire)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Direct Damage (Cold)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Direct Damage (Energy)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Direct Damage (Spirit)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Damage Over Time': (t2(("64",)), ("47",), 1,),
    'Self AF Shield': (t2(("75",)), ("47",), 1,),
    'Self Melee Haste': (t2(("20%",)), ("47",), 1,),
    'Self Damage Shield': (t2(("5.1",)), ("47",), 1,),
    'Self Melee Health Buffer': (t2(("150", "50",)), ("48", "47",), 0,),
    'Self Damage Add': (t2(("11.3",)), ("48",), 0,),
    'Lifedrain': (t2(("65",)), ("48",), 0,),
    'Heal': (t2(("80",)), ("48",), 0,),
    'Taunt': (t2(("2", "1",)), ("49", "45",), 1,),
    'Power Drain': (t2(("55", "35",)), ("49", "45",), 1,),
})
# Reactive/charged direct-damage tinctures only come in the top three
# strengths, so truncate the shared table before reusing it below.
ddEffDmgTable = t2(ddEffDmgTable[0:3])
reactiveEffectValues = offensiveEffectValues.copy()
del reactiveEffectValues['Taunt']
reactiveEffectValues.update({
    'Direct Damage (Fire)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Direct Damage (Cold)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Direct Damage (Energy)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Direct Damage (Spirit)': (ddEffDmgTable, ddEffReqLevel, 1,),
    'Self AF Shield': (t2(("75", "56", "37",)), ("47", "35", "25",), 9,),
    'Self Damage Shield': (t2(("5.1", "3.6", "2.6",)), ("47", "35", "25",), 9,),
    'Self Melee Health Buffer': (t2(("150", "100", "75", "50",)), ("48", "47", "35", "25",), 8,),
    'Omni Lifedrain': (t2(("100", "75",)), ("49", "45",), 1,),
    'Speed Decrease': (t2(("35%", "30%",)), ("49", "45",), 1,),
})
reactiveEffectValues = d2(reactiveEffectValues)
chargedEffectValues = offensiveEffectValues.copy()
del chargedEffectValues['Heal']
del chargedEffectValues['Taunt']
del chargedEffectValues['Power Drain']
chargedEffectValues.update({
    'Self Melee Haste': (t2(("17%",)), ("47",), 1,),
    'Lifedrain': (t2(("65",)), ("47",), 1,),
    'Str/Con Debuff': (t2(("56",)), ("47",), 1,),
    'Dex/Qui Debuff': (t2(("56",)), ("47",), 1,),
    'Self Damage Add': (t2(("11.3",)), ("47",), 1,),
    'Power Regeneration': (t2(("2",)), ("48",), 0,),
    'Self Acuity Buff': (t2(("75", "56", "37",)), ("47", "35", "25",), 9,),
    'Self AF Shield': (t2(("75", "56", "37",)), ("47", "35", "25",), 9,),
    'Self Damage Shield': (t2(("4.2", "2.9", "2.1",)), ("47", "35", "25",), 9,),
    'Self Melee Attack Speed': (t2(("15%", "10%",)), ("49", "45",), 1,),
    'Power Transfer': (t2(("60",)), ("45",), 2,),
    'Health Transfer': (t2(("70",)), ("45",), 2,),
    'Self Cure Poison': (t2(("1",)), ("49",), 1,),
    'Self Cure Disease': (t2(("1",)), ("49",), 1,),
})
chargedEffectValues = d2(chargedEffectValues)
# Sorted effect-name lists for the three tincture categories.
# (sorted() over a dict iterates its keys.)
offensiveEffectList = t2(sorted(offensiveEffectValues))
reactiveEffectList = t2(sorted(reactiveEffectValues))
chargedEffectList = t2(sorted(chargedEffectValues))
# Effect list for drop items: every craftable charged effect plus the
# drop-only effects.  Built through a set so names already present in
# chargedEffectList (e.g. 'Direct Damage (Energy)') are not listed twice
# -- the original list concatenation produced a duplicate entry.
otherEffectList = sorted(set(chargedEffectList).union([
    'Direct Damage (Body)',
    'Direct Damage (Energy)',
    'Dmg w/Resist Debuff (Fire)',
    'Dmg w/Resist Debuff (Cold)',
    'Dmg w/Resist Debuff (Body)',
    'Dmg w/Resist Debuff (Energy)',
    'Dmg w/Resist Debuff (Matter)',
    'Dmg w/Resist Debuff (Spirit)',
    'Heal',
    'Taunt',
    'Power Drain',
    'Omni Lifedrain',
    'Speed Decrease',
]))
# The free-form entry always sorts last.
otherEffectList = t2(otherEffectList + ['Unique Effect...', ])
# Name fragments used to build proc-tincture item names.  Positions with
# '' are unused; the tuple index presumably parallels the metal/strength
# indexing of the effect tables above -- confirm against the name builder.
ProcItemNames = d2({
    'Direct Damage (Fire)': t2(('Fiery', 'Fire',)),
    'Direct Damage (Cold)': t2(('', 'Cold',)),
    'Direct Damage (Energy)': t2(('', 'Energy',)),
    'Direct Damage (Spirit)': t2(('', 'Spirit',)),
    'Damage Over Time': t2(('', 'Eroding',)),
    'Self AF Shield': t2(('', 'Hardening',)),
    'Self Damage Shield': t2(('Barbed', 'Shard',)),
    'Self Melee Haste': t2(('', 'Celeric',)),
    'Self Melee Health Buffer': t2(('', 'Ablative', 'Harm Turning')),
    'Self Damage Add': t2(('', '', 'Retributive',)),
    'Lifedrain': t2(('', '', 'Soul Leeching',)),
    'Heal': t2(('', '', 'Mending',)),
    'Taunt': t2(('', 'Provoking',)),
    'Power Drain': t2(('', 'Depletion',)),
    'Omni Lifedrain': t2(('', 'Draining',)),
    'Speed Decrease': t2(('', 'Coil',)),
})
# Name fragments for charged ("stable") tinctures: start from the proc
# names, drop proc-only effects, override/extend the entries that differ.
StableItemNames = ProcItemNames.copy()
del StableItemNames['Heal']
del StableItemNames['Taunt']
del StableItemNames['Power Drain']
del StableItemNames['Omni Lifedrain']
del StableItemNames['Speed Decrease']
StableItemNames.update({
    'Direct Damage (Cold)': t2(('Frostbringer', 'Cold',)),
    'Direct Damage (Energy)': t2(('Crackling', 'Energy',)),
    'Direct Damage (Spirit)': t2(('Frenzied', 'Spirit',)),
    'Damage Over Time': t2(('Illbane', 'Eroding',)),
    'Self AF Shield': t2(('Hardening', 'Hardening',)),
    'Self Damage Add': t2(('Keen', 'Honing',)),
    'Lifedrain': t2(('Soul Drinker', 'Leeching',)),
    'Self Acuity Buff': t2(('Owl-runed', 'Enlightening',)),
    'Dex/Qui Debuff': t2(('Crippling', 'Crippling',)),
    'Str/Con Debuff': t2(('', 'Withering',)),
    'Power Regeneration': t2(('', '', 'Mind\'s Eye',)),
    'Self Melee Attack Speed': t2(('', 'Greater Celeric',)),
    'Power Transfer': t2(('', 'Transference',)),
    'Health Transfer': t2(('', 'Shifting',)),
    'Self Cure Poison': t2(('', 'Neutralizing',)),
    'Self Cure Disease': t2(('', 'Revivifying',)),
})
StableItemNames = d2(StableItemNames)
# (adjective, item-type suffix) for each tincture category.
EffectTypeNames = d2({
    'Charged Effect': t2(("Stable", "Tincture",)),
    'Reactive Effect': t2(("Reactive", "Armor Tincture",)),
    'Offensive Effect': t2(("Volatile", "Weapon Tincture",)),
})
# Top-level lookup tables, keyed by realm then bonus type.  The 'All'
# entries are seeded here and copied into each realm below, after which
# the realm-specific focus/skill tables are attached.
GemTables = {
    'All': {
        'Unused': unusedTable,
        'Stat': statTable,
        'Resist': resistTable,
    }
}
GemLists = {
    'All': {
        'Unused': unusedList,
        'Stat': statList,
        'Resist': resistList,
        'Charged Effect': chargedEffectList,
        'Offensive Effect': offensiveEffectList,
        'Reactive Effect': reactiveEffectList,
    }
}
DropLists = {  # XFERED
    'All': {
        'Unused': unusedList,
        'Resist': dropResistList,
        'Stat': dropStatList,
        'Cap Increase': capIncreaseList,
        'Mythical Cap Increase': None,
        'Mythical Bonus': mythicalBonusList,
        'PvE Bonus': pveBonusList,
        'Other Bonus': otherBonusList,
        'Charged Effect': otherEffectList,
        'Reactive Effect': otherEffectList,
        'Offensive Effect': otherEffectList,
        'Other Effect': otherEffectList,
    }
}
# Only use GemTables['All'] when the specific realm of craft isn't known as
# there are many multi-realm gems which have different names and recipes
#
for realm in Realms:
    GemTables[realm] = {}
    GemTables[realm].update(GemTables['All'])
    GemLists[realm] = {}
    GemLists[realm].update(GemLists['All'])
    DropLists[realm] = {}
    DropLists[realm].update(DropLists['All'])
# Attach the realm-specific focus/skill data (including the 'All' realm),
# then freeze each realm's tables.
for realm in list(GemTables.keys()):
    GemTables[realm]['Focus'] = focusTable[realm]
    GemTables[realm]['Skill'] = skillTable[realm]
    GemTables[realm] = d2(GemTables[realm])
    GemLists[realm]['Focus'] = focusList[realm]
    DropLists[realm]['Focus'] = focusList[realm]
    GemLists[realm]['Skill'] = skillList[realm]
    DropLists[realm]['Skill'] = dropSkillList[realm]
    GemLists[realm] = d2(GemLists[realm])
    DropLists[realm] = d2(DropLists[realm])
GemTables = d2(GemTables)
GemLists = d2(GemLists)
DropLists = d2(DropLists)
# Per-bonus-type value lists for drop items.  Dict-valued entries map a
# specific bonus name to its list, with None as the default for the rest.
ValuesLists = d2({  # XFERED
    'Stat': d2({
        None: statValues,
        'Hits': hitsValues,
        'Power': powerValues,
    }),
    'Resist': resistValues,
    'Focus': focusValues,
    'Skill': skillValues,
    'Charged Effect': chargedEffectValues,
    'Offensive Effect': offensiveEffectValues,
    'Reactive Effect': reactiveEffectValues,
    'Unused': unusedValues,
})
# Bonus types that can be spellcrafted onto items.
CraftedTypeList = t2((  # XFERED
    'Unused',
    'Focus',
    'Skill',
    'Stat',
    'Cap Increase',
    'PvE Bonus',
    'Other Bonus',
    'Charged Effect',
    'Offensive Effect',
))
# Maximum craftable values per bonus type.  Dict-valued entries map a
# specific bonus name to its value list, with None as the default.
CraftedValuesLists = d2({  # XFERED
    'Unused': unusedValues,
    'Focus': t2(('50',)),
    'Skill': t2(('3',)),
    'Stat': d2({
        None: t2(('15',)),
        'Hits': t2(('40',)),
    }),
    'Cap Increase': d2({
        None: t2(('5',)),
        'Hits': t2(('40',)),
    }),
    # NOTE: a duplicate plain "'PvE Bonus': t2(('5',))" entry used to
    # precede this dict; it was dead code (silently overwritten by this
    # later key) and has been removed.
    'PvE Bonus': d2({
        None: t2(('5',)),
        'To Hit': t2(('3',)),
    }),
    'Other Bonus': d2({
        None: t2(('5',)),
        'AF': t2(('10',)),
        'Archery Damage': t2(('2',)),
        'Melee Damage': t2(('2',)),
        'Spell Damage': t2(('2',)),
    }),
    'Charged Effect': t2(("60",)),
    'Offensive Effect': t2(("60", "25", "20",)),
})
# Bonuses that can actually be spellcrafted, per type.  Shared across
# realms: each realm entry below aliases the same 'All' table.
CraftedLists = {
    'All': d2({
        'Unused':
            unusedList,
        'Focus': t2((
            'All Spell Lines',
        )),
        'Skill': t2((
            'All Archery Skills',
            'All Dual Wield Skills',
            'All Magic Skills',
            'All Melee Weapon Skills',
            'Shield',
        )),
        # Str/Con/Dex/Qui (indices 0-3) plus Hits and Acuity (index 9 on).
        'Stat': t2(
            dropStatList[0:4]
            + dropStatList[9:]
        ),
        'Cap Increase': t2((
            'Strength',
            'Constitution',
            'Dexterity',
            'Quickness',
            'Acuity',
            'Hits',
            'Power',
            'Fatigue',
        )),
        'Other Bonus': t2((
            '% Power Pool',
            'Fatigue',
            'AF',
            'Archery Damage',
            'Melee Damage',
            'Spell Damage',
            'Duration of Spells',
            'Healing Effectiveness',
            'Stat Buff Effectiveness',
        )),
        'PvE Bonus': t2((
            'Defensive',
            'To Hit',
        )),
        'Charged Effect': t2((
            'Dmg w/Resist Debuff (Fire)',
            'Dmg w/Resist Debuff (Cold)',
            'Dmg w/Resist Debuff (Matter)',
            'Dmg w/Resist Debuff (Spirit)',
        )),
        'Offensive Effect': t2((
            'Direct Damage (Fire)',
            'Direct Damage (Cold)',
            'Direct Damage (Energy)',
            'Dmg w/Resist Debuff (Fire)',
            'Dmg w/Resist Debuff (Cold)',
            'Dmg w/Resist Debuff (Matter)',
            'Dmg w/Resist Debuff (Spirit)',
        )),
    }),
}
for realm in Realms:
    CraftedLists[realm] = CraftedLists['All']
CraftedLists = d2(CraftedLists)
# Map each resist/stat name to its bonus category for cap lookups.
# (dict.fromkeys is a classmethod; the original called it through the
# Caps instance, which worked but obscured that.)
Caps = dict.fromkeys(resistList, 'Resist')
Caps.update(dict.fromkeys(statList, 'Stat'))
Caps = d2(Caps)
# BONUSES ARE CALCULATED AS % OF LEVEL + CONSTANT
# E.G. [.25, 1] IS THE LEVEL / 4 + 1
# [ 0, 10] IS A FIXED 10 VALUE
# [ 4, 0] IS THE LEVEL * 4
# Cap formula (fraction-of-level, constant) for standard bonuses.
HighCapBonusList = d2({
    'AF': (1.00, 0),
    'AF Cap': (1.00, 0),
    'Arrow Recovery': (1.00, 0),
    'Death Experience Loss Reduction': (1.00, 0),
    'Duration of Spells': (.50, 0),
    'Fatigue': (.50, 0),
    'Fatigue Cap': (.50, 0),
    'Focus': (1.00, 0),
    'Healing Effectiveness': (.50, 0),
    'Hits': (4.00, 0),
    'Hits Cap': (8.00, 0),
    'Power': (.50, 1),
    'Power Cap': (1.00, 0),
    '% Power Pool': (.50, 0),
    '% Power Pool Cap': (1.00, 0),
    'PvE Bonus': (.20, 0),
    'Resist': (.50, 1),
    'Skill': (.20, 1),
    'Stat': (1.50, 0),
    'Stat Cap': (.50, 1),
    'Stat Buff Effectiveness': (.50, 0),
    'Stat Debuff Effectiveness': (.50, 0),
    'Other Bonus': (.20, 0),
})
# BONUSES ARE CALCULATED AS % OF LEVEL + CONSTANT
# E.G. [.25, 1] IS THE LEVEL / 4 + 1
# [ 0, 10] IS A FIXED 10 VALUE
# [ 4, 0] IS THE LEVEL * 4
# Same formula scheme for Mythical-slot bonuses.
MythicalCapBonusList = d2({
    'Crowd Control Reduction': (1.00, 0),
    'DPS': (.20, 0),
    'Endurance Regen': (1.00, 0),
    'Health Regen': (1.00, 0),
    'Power Regen': (1.00, 0),
    'Mythical Stat Cap': (.50, 1),
    'Mythical Resist Cap': (0, 0),
    'Mythical Bonus': (0, 0),
})
# Gem tier name fragments, lowest to highest.
MaterialGems = t2(('Lo', 'Um', 'On', 'Ee', 'Pal', 'Mon', 'Ros', 'Zo', 'Kath', 'Ra',))
# Craft / remake cost per tier (game currency units -- presumably money
# in its smallest denomination; verify against the cost calculator).
GemCosts = t2((160, 920, 3900, 13900, 40100, 88980, 133000, 198920, 258240, 296860,))
RemakeCosts = t2((120, 560, 1740, 5260, 14180, 30660, 45520, 67680, 87640, 100700,))
# Quality prefix per gem tier.
GemNames = t2((
    'Raw',
    'Uncut',
    'Rough',
    'Flawed',
    'Imperfect',
    'Polished',
    'Faceted',
    'Precious',
    'Flawless',
    'Perfect',
))
# Canonical display order for liquid and dust materials.
liquidsOrder = (
    'Air Elemental Essence',
    'Draconic Fire',
    'Frost From a Wasteland',
    'Giant Blood',
    'Heat From an Unearthly Pyre',
    'Leviathan Blood',
    'Mystic Energy',
    'Sun Light',
    'Swamp Fog',
    'Treant Blood',
    'Undead Ash and Holy Water',
)
dustsOrder = (
    'Bloodied Battlefield Dirt',
    'Essence of Life',
    'Fairy Dust',
    'Ground Blessed Undead Bone',
    'Ground Caer Stone',
    'Ground Cave Crystal',
    'Ground Draconic Scales',
    'Ground Giant Bone',
    'Ground Vendo Bone',
    'Other Worldly Dust',
    'Soot From Niflheim',
    'Unseelie Dust',
)
MaterialsOrder = t2(MaterialGems + liquidsOrder + dustsOrder)
# Default gem sub-name per bonus type ('' where it depends on the realm).
GemSubName = d2({
    'Stat': 'Essence Jewel',
    'Resist': 'Shielding Jewel',
    'Hits': 'Essence Jewel',
    'Power': 'Essence Jewel',
    'Focus': '',
    'Skill': '',
})
# Hotkey-bar slot index for each finished gem name, per realm.  Slots
# step by 2; the gaps in Midgard's numbering (78 -> 82, 100 -> 104)
# appear deliberate -- presumably reserved slots; confirm against the
# in-game macro layout before compacting.
HotkeyGems = d2({
    'Albion': d2({
        'Fiery Essence Jewel': 0,
        'Earthen Essence Jewel': 2,
        'Vapor Essence Jewel': 4,
        'Airy Essence Jewel': 6,
        'Watery Essence Jewel': 8,
        'Heated Essence Jewel': 10,
        'Dusty Essence Jewel': 12,
        'Icy Essence Jewel': 14,
        'Earthen Shielding Jewel': 16,
        'Icy Shielding Jewel': 18,
        'Heated Shielding Jewel': 20,
        'Light Shielding Jewel': 22,
        'Airy Shielding Jewel': 24,
        'Vapor Shielding Jewel': 26,
        'Dusty Shielding Jewel': 28,
        'Fiery Shielding Jewel': 30,
        'Watery Shielding Jewel': 32,
        'Vapor Battle Jewel': 34,
        'Fiery Battle Jewel': 36,
        'Earthen Battle Jewel': 38,
        'Airy Battle Jewel': 40,
        'Dusty Battle Jewel': 42,
        'Heated Battle Jewel': 44,
        'Watery War Sigil': 46,
        'Fiery War Sigil': 48,
        'Dusty War Sigil': 50,
        'Heated War Sigil': 52,
        'Earthen War Sigil': 54,
        'Airy War Sigil': 56,
        'Vapor War Sigil': 58,
        'Icy War Sigil': 60,
        'Fiery Fervor Sigil': 62,
        'Airy Fervor Sigil': 64,
        'Watery Fervor Sigil': 66,
        'Earthen Fervor Sigil': 68,
        'Vapor Fervor Sigil': 70,
        'Earthen Evocation Sigil': 72,
        'Icy Evocation Sigil': 74,
        'Fiery Evocation Sigil': 76,
        'Airy Evocation Sigil': 78,
        'Heated Evocation Sigil': 80,
        'Dusty Evocation Sigil': 82,
        'Vapor Evocation Sigil': 84,
        'Watery Evocation Sigil': 86,
        'Blood Essence Jewel': 88,
        'Mystical Essence Jewel': 90,
        'Earth Sigil': 92,
        'Ice Sigil': 94,
        'Fire Sigil': 96,
        'Air Sigil': 98,
        'Heat Sigil': 100,
        'Dust Sigil': 102,
        'Vapor Sigil': 104,
        'Water Sigil': 106,
        'Molten Magma War Sigil': 108,
        'Vacuous Fervor Sigil': 110,
        'Salt Crusted Fervor Sigil': 112,
        'Ashen Fervor Sigil': 114,
        'Steaming Fervor Sigil': 116,
        'Vacuous Sigil': 118,
        'Salt Crusted Sigil': 120,
        'Ashen Sigil': 122,
        'Brilliant Sigil': 124,
        'Finesse War Sigil': 126,
        'Finesse Fervor Sigil': 128,
        'Glacial War Sigil': 130,
        'Cinder War Sigil': 132,
        'Radiant Fervor Sigil': 134,
        'Magnetic Fervor Sigil': 136,
        'Clout Fervor Sigil': 138,
    }),
    'Hibernia': d2({
        'Fiery Essence Jewel': 0,
        'Earthen Essence Jewel': 2,
        'Vapor Essence Jewel': 4,
        'Airy Essence Jewel': 6,
        'Watery Essence Jewel': 8,
        'Heated Essence Jewel': 10,
        'Dusty Essence Jewel': 12,
        'Icy Essence Jewel': 14,
        'Earthen Shielding Jewel': 16,
        'Icy Shielding Jewel': 18,
        'Heated Shielding Jewel': 20,
        'Light Shielding Jewel': 22,
        'Airy Shielding Jewel': 24,
        'Vapor Shielding Jewel': 26,
        'Dusty Shielding Jewel': 28,
        'Fiery Shielding Jewel': 30,
        'Watery Shielding Jewel': 32,
        'Vapor Battle Jewel': 34,
        'Fiery Battle Jewel': 36,
        'Earthen Battle Jewel': 38,
        'Airy Battle Jewel': 40,
        'Dusty Battle Jewel': 42,
        'Heated Battle Jewel': 44,
        'Watery War Spell Stone': 46,
        'Fiery War Spell Stone': 48,
        'Dusty War Spell Stone': 50,
        'Heated War Spell Stone': 52,
        'Earthen War Spell Stone': 54,
        'Icy War Spell Stone': 56,
        'Airy War Spell Stone': 58,
        'Fiery Nature Spell Stone': 60,
        'Watery Nature Spell Stone': 62,
        'Earthen Nature Spell Stone': 64,
        'Airy Nature Spell Stone': 66,
        'Airy Arcane Spell Stone': 68,
        'Fiery Arcane Spell Stone': 70,
        'Watery Arcane Spell Stone': 72,
        'Vapor Arcane Spell Stone': 74,
        'Icy Arcane Spell Stone': 76,
        'Earthen Arcane Spell Stone': 78,
        'Blood Essence Jewel': 80,
        'Mystical Essence Jewel': 82,
        'Fire Spell Stone': 84,
        'Water Spell Stone': 86,
        'Vapor Spell Stone': 88,
        'Ice Spell Stone': 90,
        'Earth Spell Stone': 92,
        'Light War Spell Stone': 94,
        'Steaming Nature Spell Stone': 96,
        'Oozing Nature Spell Stone': 98,
        'Mineral Encrusted Nature Spell Stone': 100,
        'Steaming Spell Stone': 102,
        'Oozing Spell Stone': 104,
        'Mineral Encrusted Spell Stone': 106,
        'Spectral Spell Stone': 108,
        'Phantasmal Spell Stone': 110,
        'Ethereal Spell Stone': 112,
        'Spectral Arcane Spell Stone': 114,
        'Phantasmal Arcane Spell Stone': 116,
        'Ethereal Arcane Spell Stone': 118,
        'Shadowy Arcane Spell Stone': 120,
        'Embracing Arcane Spell Stone': 122,
        'Aberrant Arcane Spell Stone': 124,
        'Brilliant Spell Stone': 126,
        'Finesse War Spell Stone': 128,
        'Finesse Nature Spell Stone': 130,
        'Glacial War Spell Stone': 132,
        'Cinder War Spell Stone': 134,
        'Radiant Nature Spell Stone': 136,
        'Magnetic Nature Spell Stone': 138,
        'Clout Nature Spell Stone': 140,
    }),
    'Midgard': d2({
        'Fiery Essence Jewel': 0,
        'Earthen Essence Jewel': 2,
        'Vapor Essence Jewel': 4,
        'Airy Essence Jewel': 6,
        'Watery Essence Jewel': 8,
        'Heated Essence Jewel': 10,
        'Dusty Essence Jewel': 12,
        'Icy Essence Jewel': 14,
        'Earthen Shielding Jewel': 16,
        'Icy Shielding Jewel': 18,
        'Heated Shielding Jewel': 20,
        'Light Shielding Jewel': 22,
        'Airy Shielding Jewel': 24,
        'Vapor Shielding Jewel': 26,
        'Dusty Shielding Jewel': 28,
        'Fiery Shielding Jewel': 30,
        'Watery Shielding Jewel': 32,
        'Vapor Battle Jewel': 34,
        'Fiery Battle Jewel': 36,
        'Earthen Battle Jewel': 38,
        'Airy Battle Jewel': 40,
        'Dusty Battle Jewel': 42,
        'Heated Battle Jewel': 44,
        'Watery War Rune': 46,
        'Fiery War Rune': 48,
        'Earthen War Rune': 50,
        'Heated War Rune': 52,
        'Airy War Rune': 54,
        'Vapor War Rune': 56,
        'Icy War Rune': 58,
        'Earthen Primal Rune': 60,
        'Airy Primal Rune': 62,
        'Fiery Primal Rune': 64,
        'Icy Chaos Rune': 66,
        'Dusty Chaos Rune': 68,
        'Heated Chaos Rune': 70,
        'Vapor Chaos Rune': 72,
        'Watery Chaos Rune': 74,
        'Airy Chaos Rune': 76,
        'Fiery Chaos Rune': 78,
        'Blood Essence Jewel': 82,
        'Mystical Essence Jewel': 84,
        'Ice Rune': 86,
        'Dust Rune': 88,
        'Heat Rune': 90,
        'Vapor Rune': 92,
        'Lightning Charged War Rune': 94,
        'Ashen Primal Rune': 96,
        'Ashen Rune': 98,
        'Blighted Rune': 100,
        'Valiant Primal Rune': 104,
        'Blighted Primal Rune': 106,
        'Unholy Primal Rune': 108,
        'Brilliant Rune': 110,
        'Finesse War Rune': 112,
        'Finesse Primal Rune': 114,
        'Glacial War Rune': 116,
        'Cinder War Rune': 118,
        'Radiant Primal Rune': 120,
        'Magnetic Primal Rune': 122,
        'Clout Primal Rune': 124,
    }),
})
# Relative imbue-cost weight per bonus type (presumably points per unit
# of bonus value; verify against the imbue calculator).
ImbueMultipliers = d2({
    'Stat': 1.0,
    'Resist': 2.0,
    'Skill': 5.0,
    'Hits': 0.25,
    'Power': 2.0,
    'Focus': 1.0,
    'Unused': 0.0,
})
# Selectable crafted-item quality percentages.
QualityValues = t2(('94', '95', '96', '97', '98', '99', '100'))
# Overcharge starting percentages and the per-quality modifier
# (presumably added to the overcharge chance; confirm in the OC code).
OCStartPercentages = (0, 10, 20, 30, 50, 70)
ItemQualOCModifiers = d2({
    '94': 0,
    '95': 0,
    '96': 6,
    '97': 8,
    '98': 10,
    '99': 18,
    '100': 26,
})
# Imbue points per level; 51 entries, presumably indexed by level - 1.
ImbuePts = (
    1, 2, 2, 3, 4, 4, 5, 5, 6, 7,
    7, 8, 9, 9, 10, 10, 11, 12, 12, 13,
    13, 14, 15, 15, 16, 16, 17, 18, 18, 19,
    20, 20, 21, 21, 22, 23, 23, 24, 24, 25,
    26, 26, 27, 27, 28, 29, 29, 30, 31, 31, 32,
)
# Probability of each armor location being struck (sums to 1.0).
BodyHitOdds = d2({
    'Chest': .40,
    'Legs': .25,
    'Arms': .15,
    'Head': .10,
    'Hands': .05,
    'Feet': .05,
})
# UI tab names: armor/weapon pieces, then jewelry slots.
PieceTabList = t2((
    'Chest',
    'Arms',
    'Head',
    'Legs',
    'Hands',
    'Feet',
    'Right Hand',
    'Left Hand',
    '2 Handed',
    'Ranged',
    'Spare',
))
JewelTabList = t2((
    'Neck',
    'Cloak',
    'Jewel',
    'Belt',
    'Left Ring',
    'Right Ring',
    'Left Wrist',
    'Right Wrist',
    'Mythical',
))
TabList = t2(PieceTabList + JewelTabList)
# Armor = the six body locations plus the spare slot.
ArmorTabList = list(PieceTabList[:6])
ArmorTabList.append('Spare')
ArmorTabList = t2(ArmorTabList)
# Weapons = everything after the body locations.
WeaponTabList = t2(PieceTabList[6:])
FocusTabList = t2(('2 Handed', 'Spare',))
# File extension(s) per item slot (presumably for saved item templates);
# tuple values list accepted alternatives, '*' matches anything.
FileExt = d2({
    'Neck': 'neck',
    'Cloak': 'cloak',
    'Belt': 'belt',
    'Jewel': 'jewel',
    'Left Ring': 'ring',
    'Right Ring': 'ring',
    'Left Wrist': ('bracer', 'wrist',),
    'Right Wrist': ('bracer', 'wrist',),
    'Chest': 'chest',
    'Arms': 'arms',
    'Head': 'helm',
    'Legs': 'legs',
    'Feet': 'boots',
    'Hands': 'hands',
    'Right Hand': 'wep',
    'Left Hand': ('lhwep', 'shield',),
    '2 Handed': ('2hwep', 'lhwep', 'wep',),
    'Ranged': 'ranged',
    'Mythical': 'myth',
    'Spare': '*',
})
# Shield wood types, lowest to highest tier.
ShieldTypes = t2((
    'Rowan',
    'Elm',
    'Oaken',
    'Ironwood',
    'Heartwood',
    'Runewood',
    'Stonewood',
    'Ebonwood',
    'Dyrwood',
    'Duskwood',
))
# Normalize alternate bonus-type names (as seen in item files) to the
# canonical type names used by the tables above.
FixTypeTable = d2({
    'PvE': 'PvE Bonus',
    'Hits': 'Stat',
    'Power': 'Stat',
})
# Normalize alternate/legacy effect, skill and resist spellings to the
# canonical names used as keys throughout this module.
FixEffectsTable = d2({
    'Bonedancing': 'Bone Army',
    'PainWorking': 'Painworking',
    'Subterranean': 'Cave Magic',
    'BeastCraft': 'Beastcraft',
    'Arboreal': 'Arboreal Path',
    'Arboreal Focus': 'Arboreal Path',
    'Body Focus': 'Body Magic',
    'Cold Focus': 'Cold Magic',
    'Earth Focus': 'Earth Magic',
    'Fire Focus': 'Fire Magic',
    'Matter Focus': 'Matter Magic',
    'Mind Focus': 'Mind Magic',
    'Spirit Focus': 'Spirit Magic',
    'Wind Focus': 'Wind Magic',
    'Composite Bow': 'Archery',
    'Recurve Bow': 'Archery',
    'Longbow': 'Archery',
    'All Focus Bonus': 'All Spell Lines',
    'All Melee Skill Bonus': 'All Melee Weapon Skills',
    'All Magic Skill Bonus': 'All Magic Skills',
    'All Dual Wield Skill Bonus': 'All Dual Wield Skills',
    'Archery Skill Bonus': 'All Archery Skills',
    'AF Bonus': 'AF',
    'Archery Damage Bonus': 'Archery Damage',
    'Archery Range Bonus': 'Archery Range',
    'Archery Speed Bonus': 'Archery Speed',
    'Buff Bonus': 'Stat Buff Effectiveness',
    'Casting Range': 'Spell Range',
    'Casting Speed Bonus': 'Casting Speed',
    'Debuff Bonus': 'Stat Debuff Effectiveness',
    'Defensive Bonus': 'Defensive',
    'Healing Bonus': 'Healing Effectiveness',
    'Spell Damage Bonus': 'Spell Damage',
    'Magic Damage': 'Spell Damage',
    'Spell Duration Bonus': 'Duration of Spells',
    'Spell Range Bonus': 'Spell Range',
    'Style Damage Bonus': 'Style Damage',
    'Melee Damage Bonus': 'Melee Damage',
    'Melee Speed Bonus': 'Melee Combat Speed',
    'Power Percentage Bonus': '% Power Pool',
    'Strength Cap Increase': 'Strength',
    'Constitution Cap Increase': 'Constitution',
    'Dexterity Cap Increase': 'Dexterity',
    'Quickness Cap Increase': 'Quickness',
    'Intelligence Cap Increase': 'Intelligence',
    'Piety Cap Increase': 'Piety',
    'Charisma Cap Increase': 'Charisma',
    'Empathy Cap Increase': 'Empathy',
    'Acuity Cap Increase': 'Acuity',
    'Power Cap Increase': 'Power',
    'Hits Cap Increase': 'Hits',
    'AF Cap Increase': 'AF',
    'Reactionary Style Damage Bonus': 'Reactionary Style Damage',
    'Death XP Loss Reduction': 'Death Experience Loss Reduction',
    'Blocking': 'Block',
    'PvE': 'PvE Bonus',
    'Body Resist': 'Body',
    'Cold Resist': 'Cold',
    'Heat Resist': 'Heat',
    'Energy Resist': 'Energy',
    'Matter Resist': 'Matter',
    'Spirit Resist': 'Spirit',
    'Crush Resist': 'Crush',
    'Thrust Resist': 'Thrust',
    'Slash Resist': 'Slash',
})
if __name__ == "__main__":
    # Self-check: verify every craftable gem recipe has a matching
    # hotkey entry in HotkeyGems for its realm.
    import sys  # local import: sys is not visibly imported at module top

    for (realm, realmtable) in list(GemTables.items()):
        if realm == "All":
            # 'All' merges the per-realm tables and would double-report.
            continue
        for (type, typetable) in list(realmtable.items()):
            for (effect, effecttable) in list(typetable.items()):
                try:
                    name = effecttable[0] + " " + effecttable[1]
                    # Lookup only -- raises KeyError when the entry is missing.
                    tryit = HotkeyGems[realm][name]
                except (KeyError, TypeError, IndexError):
                    # Narrowed from a bare 'except:' so unrelated bugs
                    # (e.g. NameError) are no longer silently swallowed;
                    # TypeError/IndexError cover malformed recipe tuples.
                    sys.stdout.write("Missing %s %s entry\n" % (type, effect,))
|
artomason/KortsCalculator
|
Constants.py
|
Python
|
gpl-2.0
| 42,919
|
[
"CRYSTAL"
] |
bfd95840120c524da7c22c6035bc6d0fac05792194e1054d506eeadbe0bc859b
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
from .base import Ordination, OrdinationResults
from .utils import corr, svd_rank, scale
class CCA(Ordination):
    r"""Compute constrained (also known as canonical) correspondence
    analysis.

    Canonical (or constrained) correspondence analysis is a
    multivariate ordination technique. It appeared in community
    ecology [1]_ and relates community composition to the variation in
    the environment (or in other factors). It works from data on
    abundances or counts of individuals and environmental variables,
    and outputs ordination axes that maximize niche separation among
    species.

    It is better suited to extract the niches of taxa than linear
    multivariate methods because it assumes unimodal response curves
    (habitat preferences are often unimodal functions of habitat
    variables [2]_).

    As more environmental variables are added, the result gets more
    similar to unconstrained ordination, so only the variables that
    are deemed explanatory should be included in the analysis.

    Parameters
    ----------
    Y : array_like Community data matrix of shape (n, m): a
        contingency table for m species at n sites.
    X : array_like Constraining matrix of shape (n, q): q quantitative
        environmental variables at n sites.

    Notes
    -----
    Canonical *correspondence* analysis shouldn't be confused with
    canonical *correlation* analysis (CCorA, but sometimes called
    CCA), a different technique to search for multivariate
    relationships between two datasets. Canonical correlation analysis
    is a statistical tool that, given two vectors of random variables,
    finds linear combinations that have maximum correlation with each
    other. In some sense, it assumes linear responses of "species" to
    "environmental variables" and is not well suited to analyze
    ecological data.

    In data analysis, ordination (or multivariate gradient analysis)
    complements clustering by arranging objects (species, samples...)
    along gradients so that similar ones are closer and dissimilar
    ones are further. There's a good overview of the available
    techniques in http://ordination.okstate.edu/overview.htm.

    See Also
    --------
    CA

    References
    ----------
    .. [1] Cajo J. F. Ter Braak, "Canonical Correspondence Analysis: A
        New Eigenvector Technique for Multivariate Direct Gradient
        Analysis", Ecology 67.5 (1986), pp. 1167-1179.
    .. [2] Cajo J.F. Braak and Piet F.M. Verdonschot, "Canonical
        correspondence analysis and related multivariate methods in
        aquatic ecology", Aquatic Sciences 57.3 (1995), pp. 255-289.
    """
    short_method_name = 'CCA'
    long_method_name = 'Canonical Correspondence Analysis'

    def __init__(self, Y, X):
        # Community (contingency) table, shape (n sites, m species); cast
        # to float64 so all downstream linear algebra runs in double
        # precision regardless of the input dtype.
        self.Y = np.asarray(Y, dtype=np.float64)
        # Environmental (constraining) table, shape (n sites, q variables).
        self.X = np.asarray(X, dtype=np.float64)
        # The full decomposition is performed eagerly at construction time;
        # `scores` only combines the stored results.
        self._cca()

    def _cca(self):
        """Run the CCA decomposition and store intermediate results.

        Follows the algorithm in Legendre & Legendre (1998): chi-square
        transform of the contingency table, weighted multiple regression on
        the (standardized) environmental variables, then separate SVDs of
        the fitted and residual parts. Sets the attributes listed in
        ``iter_`` below plus ``eigenvalues`` on ``self``.
        """
        X, Y = self.X, self.Y
        if X.shape[0] != Y.shape[0]:
            raise ValueError("Contingency and environmental tables must have"
                             " the same number of rows (sites). X has {0}"
                             " rows but Y has {1}.".format(X.shape[0],
                                                           Y.shape[0]))
        if Y.min() < 0:
            raise ValueError("Contingency table must be nonnegative")
        row_max = Y.max(axis=1)
        if np.any(row_max <= 0):
            # Or else the lstsq call to compute Y_hat breaks
            raise ValueError("Contingency table cannot contain row of only 0s")
        # Step 1 (similar to Pearson chi-square statistic)
        grand_total = Y.sum()
        Q = Y / grand_total  # Relative frequencies of Y (contingency table)
        # Species and site weights (marginal totals)
        column_marginals = Q.sum(axis=0)
        row_marginals = Q.sum(axis=1)
        # Formula 9.32 in Legendre & Legendre (1998). Notice that it's a
        # scaled version of the contribution of each cell towards Pearson
        # chi-square statistic.
        expected = np.outer(row_marginals, column_marginals)
        Q_bar = (Q - expected) / np.sqrt(expected)
        # Step 2. Standardize columns of X with respect to site weights,
        # using the maximum likelihood variance estimator (Legendre &
        # Legendre 1998, p. 595)
        X = scale(X, weights=row_marginals, ddof=0)
        # Step 3. Weighted multiple regression: rows of X are weighted by
        # the square root of the site (row) marginals before fitting.
        X_weighted = row_marginals[:, None]**0.5 * X
        B, _, rank_lstsq, _ = np.linalg.lstsq(X_weighted, Q_bar)
        Y_hat = X_weighted.dot(B)
        Y_res = Q_bar - Y_hat
        # Step 4. Eigenvalue decomposition of the fitted table. Singular
        # vectors beyond the numerical rank are dropped.
        u, s, vt = np.linalg.svd(Y_hat, full_matrices=False)
        rank = svd_rank(Y_hat.shape, s)
        s = s[:rank]
        u = u[:, :rank]
        vt = vt[:rank]
        U = vt.T
        # Step 5. Eq. 9.38
        U_hat = Q_bar.dot(U) * s**-1
        # Residuals analysis: the same decomposition applied to the part of
        # the table not explained by the environmental variables.
        u_res, s_res, vt_res = np.linalg.svd(Y_res, full_matrices=False)
        rank = svd_rank(Y_res.shape, s_res)
        s_res = s_res[:rank]
        u_res = u_res[:, :rank]
        vt_res = vt_res[:rank]
        U_res = vt_res.T
        U_hat_res = Y_res.dot(U_res) * s_res**-1
        # Storing values needed to compute scores
        iter_ = (('column_marginals', column_marginals),
                 ('row_marginals', row_marginals),
                 ('U', U),
                 ('U_res', U_res),
                 ('U_hat', U_hat),
                 ('U_hat_res', U_hat_res),
                 ('u', u), ('Y_hat', Y_hat),
                 ('s', s), ('s_res', s_res),
                 ('X_weighted', X_weighted[:, :rank_lstsq]))
        for val_name, val in iter_:
            setattr(self, val_name, val)
        # Constrained eigenvalues followed by residual (unconstrained) ones.
        self.eigenvalues = np.r_[s, s_res]**2

    def scores(self, scaling):
        r"""Compute site and species scores for different scalings.

        Parameters
        ----------
        scaling : int
            The same options as in `CA` are available, and the
            interpretation is the same.
        """
        if scaling not in {1, 2}:
            raise NotImplementedError(
                "Scaling {0} not implemented.".format(scaling))
        # In this case scores are also a bit intertwined, so we'll
        # almost compute them both and then choose.
        # Scalings (p. 596 L&L 1998):
        # Species scores, scaling 1
        V = (self.column_marginals**-0.5)[:, None] * self.U
        # Site scores, scaling 2
        V_hat = (self.row_marginals**-0.5)[:, None] * self.U_hat
        # Site scores, scaling 1
        F = V_hat * self.s
        # Species scores, scaling 2
        F_hat = V * self.s
        # Site scores which are linear combinations of environmental
        # variables
        Z_scaling1 = ((self.row_marginals**-0.5)[:, None] *
                      self.Y_hat.dot(self.U))
        Z_scaling2 = Z_scaling1 * self.s**-1
        # Species residual scores, scaling 1
        V_res = (self.column_marginals**-0.5)[:, None] * self.U_res
        # Site residual scores, scaling 2
        V_hat_res = (self.row_marginals**-0.5)[:, None] * self.U_hat_res
        # Site residual scores, scaling 1
        F_res = V_hat_res * self.s_res
        # Species residual scores, scaling 2
        F_hat_res = V_res * self.s_res
        eigvals = self.eigenvalues
        if scaling == 1:
            species_scores = np.hstack((V, V_res))
            site_scores = np.hstack((F, F_res))
            site_constraints = np.hstack((Z_scaling1, F_res))
        elif scaling == 2:
            species_scores = np.hstack((F_hat, F_hat_res))
            site_scores = np.hstack((V_hat, V_hat_res))
            site_constraints = np.hstack((Z_scaling2, V_hat_res))
        # Correlation of the (rank-truncated) weighted explanatory
        # variables with the constrained site axes.
        biplot_scores = corr(self.X_weighted, self.u)
        return OrdinationResults(eigvals=eigvals,
                                 species=species_scores,
                                 site=site_scores,
                                 biplot=biplot_scores,
                                 site_constraints=site_constraints)
|
Jorge-C/bipy
|
skbio/maths/stats/ordination/canonical_correspondence_analysis.py
|
Python
|
bsd-3-clause
| 8,563
|
[
"scikit-bio"
] |
a026a2d64f298b3fd393c6e2bcb7ec80d197976120ce8dbfa76d9da3d30bc52c
|
import moose
import numpy as np
import rdesigneur as rd
def test_21_vclamp():
    """Exercise the rdesigneur voltage-clamp stimulus.

    Builds a single-compartment model whose soma is clamped at -65 mV with
    a +20 mV step between t=0.1 s and t=0.2 s, runs it for 0.3 s, and
    checks the mean/std of the recorded Vm and holding-current tables
    against reference values.
    """
    builder = rd.rdesigneur(
        stimList=[['soma', '1', '.', 'vclamp', '-0.065 + (t>0.1 && t<0.2) * 0.02']],
        plotList=[
            ['soma', '1', '.', 'Vm', 'Soma membrane potential'],
            ['soma', '1', 'vclamp', 'current', 'Soma holding current'],
        ],
    )
    builder.buildModel()
    moose.reinit()
    moose.start(0.3)
    # rdes.display(block=False)
    # Collect every recording table produced by the plotList above.
    data = [table.vector for table in moose.wildcardFind('/##[TYPE=Table]')]
    mean = np.mean(data, axis=1)
    std = np.std(data, axis=1)
    assert np.allclose([-5.83422152e-02, -9.28563233e-09], mean), mean
    assert np.allclose([9.41512562e-03, 2.79081939e-08], std), std
    return data
# Allow the test to be run directly as a script (outside pytest).
if __name__ == '__main__':
    test_21_vclamp()
|
dilawar/moose-core
|
tests/rdesigneur/test_21_vclamp.py
|
Python
|
gpl-3.0
| 854
|
[
"MOOSE"
] |
300d444b40c654eef6f7eb9169d6a99ab6fa9cb3d71759cfd6bdfbeaab459101
|
"""
Test the Studio help links.
"""
from flaky import flaky
from unittest import skip
from common.test.acceptance.fixtures.course import XBlockFixtureDesc
from common.test.acceptance.tests.studio.base_studio_test import StudioCourseTest, ContainerBase
from common.test.acceptance.pages.studio.index import DashboardPage, DashboardPageWithPrograms
from common.test.acceptance.pages.studio.utils import click_studio_help, studio_help_links
from common.test.acceptance.pages.studio.index import IndexPage, HomePage
from common.test.acceptance.tests.studio.base_studio_test import StudioLibraryTest
from common.test.acceptance.pages.studio.course_info import CourseUpdatesPage
from common.test.acceptance.pages.studio.utils import click_css
from common.test.acceptance.pages.studio.library import LibraryPage
from common.test.acceptance.pages.studio.users import LibraryUsersPage
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.asset_index import AssetIndexPage
from common.test.acceptance.pages.studio.edit_tabs import PagesPage
from common.test.acceptance.pages.studio.textbook_upload import TextbookUploadPage
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.pages.studio.settings_graders import GradingPage
from common.test.acceptance.pages.studio.settings_group_configurations import GroupConfigurationsPage
from common.test.acceptance.pages.studio.settings_advanced import AdvancedSettingsPage
from common.test.acceptance.pages.studio.settings_certificates import CertificatesPage
from common.test.acceptance.pages.studio.import_export import ExportCoursePage, ImportCoursePage
from common.test.acceptance.pages.studio.users import CourseTeamPage
from common.test.acceptance.fixtures.programs import ProgramsConfigMixin
from common.test.acceptance.tests.helpers import (
AcceptanceTest,
assert_nav_help_link,
assert_side_bar_help_link
)
from common.test.acceptance.pages.studio.import_export import ExportLibraryPage, ImportLibraryPage
from common.test.acceptance.pages.studio.auto_auth import AutoAuthPage
class StudioHelpTest(StudioCourseTest):
    """Tests for Studio help."""

    def test_studio_help_links(self):
        """Test that the help links are present and have the correct content.

        Opens the Studio dashboard help menu and compares each rendered
        link's href, visible text, and screen-reader text against the
        expected values.
        """
        page = DashboardPage(self.browser)
        page.visit()
        click_studio_help(page)
        links = studio_help_links(page)
        expected_links = [{
            'href': u'http://docs.edx.org/',
            'text': u'edX Documentation',
            'sr_text': u'Access documentation on http://docs.edx.org'
        }, {
            'href': u'https://open.edx.org/',
            'text': u'Open edX Portal',
            'sr_text': u'Access the Open edX Portal'
        }, {
            'href': u'https://www.edx.org/course/overview-creating-edx-course-edx-edx101#.VO4eaLPF-n1',
            'text': u'Enroll in edX101',
            'sr_text': u'Enroll in edX101: Overview of Creating an edX Course'
        }, {
            'href': u'https://www.edx.org/course/creating-course-edx-studio-edx-studiox',
            'text': u'Enroll in StudioX',
            'sr_text': u'Enroll in StudioX: Creating a Course with edX Studio'
        }, {
            'href': u'mailto:partner-support@example.com',
            'text': u'Contact Us',
            'sr_text': 'Send an email to partner-support@example.com'
        }]
        # zip() silently truncates to the shorter sequence, so a missing
        # (or extra) help link used to pass unnoticed; compare counts first.
        self.assertEqual(len(expected_links), len(links))
        for expected, actual in zip(expected_links, links):
            self.assertEqual(expected['href'], actual.get_attribute('href'))
            self.assertEqual(expected['text'], actual.text)
            self.assertEqual(
                expected['sr_text'],
                actual.find_element_by_xpath('following-sibling::span').text
            )
class SignInHelpTest(AcceptanceTest):
    """
    Verifies the help link shown on the 'Sign In' page.
    """
    def setUp(self):
        super(SignInHelpTest, self).setUp()
        self.index_page = IndexPage(self.browser)
        self.index_page.visit()

    def test_sign_in_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Sign In' page must
        point at the 'getting_started/get_started.html' documentation page.
        """
        sign_in_page = self.index_page.click_sign_in()
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_nav_help_link(
            test=self,
            page=sign_in_page,
            href=expected_help_url,
            signed_in=False
        )
class SignUpHelpTest(AcceptanceTest):
    """
    Verifies the help link shown on the 'Sign Up' page.
    """
    def setUp(self):
        super(SignUpHelpTest, self).setUp()
        self.index_page = IndexPage(self.browser)
        self.index_page.visit()

    def test_sign_up_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Sign Up' page must
        point at the 'getting_started/get_started.html' documentation page.
        """
        sign_up_page = self.index_page.click_sign_up()
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_nav_help_link(
            test=self,
            page=sign_up_page,
            href=expected_help_url,
            signed_in=False
        )
class HomeHelpTest(StudioCourseTest):
    """
    Verifies help links on the 'Home' (Courses tab) page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(HomeHelpTest, self).setUp()
        self.home_page = HomePage(self.browser)
        self.home_page.visit()

    def test_course_home_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Home' (Courses tab) page must point
        at the 'getting_started/get_started.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.home_page,
            href=expected_help_url
        )

    def test_course_home_side_bar_help(self):
        """
        The 'Getting Started with edX Studio' sidebar link on the 'Home'
        (Courses tab) page must point at the
        'getting_started/get_started.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.home_page,
            href=expected_help_url,
            help_text='Getting Started with edX Studio',
            as_list_item=True
        )
class NewCourseHelpTest(AcceptanceTest):
    """
    Verifies help links while creating a new course.
    """
    def setUp(self):
        super(NewCourseHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.dashboard_page = DashboardPage(self.browser)
        self.auth_page.visit()
        self.dashboard_page.visit()
        # The 'Create a New Course' form only appears after clicking the
        # new-course button, which must be present for a staff user.
        self.assertTrue(self.dashboard_page.new_course_button.present)
        self.dashboard_page.click_new_course_button()

    def test_course_create_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Create a New Course' page must point
        at the 'getting_started/get_started.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course'
            '/en/latest/getting_started/get_started.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.dashboard_page,
            href=expected_help_url
        )

    def test_course_create_side_bar_help(self):
        """
        The 'Getting Started with edX Studio' sidebar link on the
        'Create a New Course' page must point at the
        'getting_started/get_started.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.dashboard_page,
            href=expected_help_url,
            help_text='Getting Started with edX Studio',
            as_list_item=True
        )
class NewLibraryHelpTest(AcceptanceTest):
    """
    Verifies help links while creating a new library.
    """
    def setUp(self):
        super(NewLibraryHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.dashboard_page = DashboardPage(self.browser)
        self.auth_page.visit()
        self.dashboard_page.visit()
        # The 'Create a New Library' form only appears after clicking the
        # new-library button, which must be present for a staff user.
        self.assertTrue(self.dashboard_page.has_new_library_button)
        self.dashboard_page.click_new_library()

    def test_library_create_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Create a New Library' page must
        point at the 'getting_started/get_started.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.dashboard_page,
            href=expected_help_url
        )

    def test_library_create_side_bar_help(self):
        """
        The 'Getting Started with edX Studio' sidebar link on the
        'Create a New Library' page must point at the
        'getting_started/get_started.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.dashboard_page,
            href=expected_help_url,
            help_text='Getting Started with edX Studio',
            as_list_item=True
        )
class LibraryTabHelpTest(AcceptanceTest):
    """
    Verifies help links on the dashboard's library tab.
    """
    def setUp(self):
        super(LibraryTabHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.dashboard_page = DashboardPage(self.browser)
        self.auth_page.visit()
        self.dashboard_page.visit()

    def test_library_tab_nav_help(self):
        """
        After switching to the Libraries tab, the nav-bar 'Help' link must
        point at the 'getting_started/get_started.html' documentation page.
        """
        # Switching tabs requires the libraries feature to be available.
        self.assertTrue(self.dashboard_page.has_new_library_button)
        click_css(self.dashboard_page, '#course-index-tabs .libraries-tab', 0, False)
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.dashboard_page,
            href=expected_help_url
        )
class LibraryHelpTest(StudioLibraryTest):
    """
    Test help links on a Library page and its 'User Access' settings page.
    """
    def setUp(self):
        super(LibraryHelpTest, self).setUp()
        # Content page of the library created by StudioLibraryTest.
        self.library_page = LibraryPage(self.browser, self.library_key)
        # 'User Access' settings page of the same library.
        self.library_user_page = LibraryUsersPage(self.browser, self.library_key)

    def test_library_content_nav_help(self):
        """
        Scenario: Help link in navigation bar is working on content
        library page(click a library on the Library list page).
        Given that I am on the content library page(click a library on the Library list page).
        And I want help about the process
        And I click the 'Help' in the navigation bar
        Then Help link should open.
        And help url should end with 'course_components/libraries.html'
        """
        self.library_page.visit()
        # The href we want to see in anchor help element.
        href = "http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/" \
               "en/latest/course_components/libraries.html"
        # Assert that help link is correct.
        assert_nav_help_link(
            test=self,
            page=self.library_page,
            href=href
        )

    def test_library_content_side_bar_help(self):
        """
        Scenario: Help link in sidebar links is working on
        content library page(click a library on the Library list page).
        Given that I am on the content library page(click a library on the Library list page).
        And I want help about the process
        And I click the 'Learn more about content libraries' in the sidebar links
        Then Help link should open.
        And help url should end with 'course_components/libraries.html'
        """
        self.library_page.visit()
        # The href we want to see in anchor help element.
        href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
               'en/latest/course_components/libraries.html'
        # Assert that help link is correct.
        assert_side_bar_help_link(
            test=self,
            page=self.library_page,
            href=href,
            help_text='Learn more about content libraries'
        )

    def test_library_user_access_setting_nav_help(self):
        """
        Scenario: Help link in navigation bar is working on 'User Access'
        settings page of library.
        Given that I am on the 'User Access' settings page of library.
        And I want help about the process
        And I click the 'Help' in the navigation bar
        Then Help link should open.
        And help url should end with
        'course_components/libraries.html#give-other-users-access-to-your-library'
        """
        self.library_user_page.visit()
        # The href we want to see in anchor help element.
        href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
               'latest/course_components/libraries.html#give-other-users-access-to-your-library'
        # Assert that help link is correct.
        assert_nav_help_link(
            test=self,
            page=self.library_user_page,
            href=href
        )
class LibraryImportHelpTest(StudioLibraryTest):
    """
    Test help links on the Library import page.
    """
    def setUp(self):
        super(LibraryImportHelpTest, self).setUp()
        self.library_import_page = ImportLibraryPage(self.browser, self.library_key)
        self.library_import_page.visit()

    def test_library_import_nav_help(self):
        """
        Scenario: Help link in navigation bar is working on Library import page.
        Given that I am on the Library import page.
        And I want help about the process
        And I click the 'Help' in the navigation bar
        Then Help link should open.
        And help url should end with 'course_components/libraries.html#import-a-library'
        """
        # The href we want to see in anchor help element.
        href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
               'latest/course_components/libraries.html#import-a-library'
        # Assert that help link is correct.
        assert_nav_help_link(
            test=self,
            page=self.library_import_page,
            href=href
        )

    def test_library_import_side_bar_help(self):
        """
        Scenario: Help link in sidebar links is working on Library import page.
        Given that I am on the Library import page.
        And I want help about the process
        And I click the 'Learn more about importing a library' in the sidebar links
        Then Help link should open.
        And help url should end with 'course_components/libraries.html#import-a-library'
        """
        # The href we want to see in anchor help element.
        href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
               'latest/course_components/libraries.html#import-a-library'
        # Assert that help link is correct.
        assert_side_bar_help_link(
            test=self,
            page=self.library_import_page,
            href=href,
            help_text='Learn more about importing a library'
        )
class LibraryExportHelpTest(StudioLibraryTest):
    """
    Test help links on the Library export page.
    """
    def setUp(self):
        super(LibraryExportHelpTest, self).setUp()
        self.library_export_page = ExportLibraryPage(self.browser, self.library_key)
        self.library_export_page.visit()

    def test_library_export_nav_help(self):
        """
        Scenario: Help link in navigation bar is working on Library export page.
        Given that I am on the Library export page.
        And I want help about the process
        And I click the 'Help' in the navigation bar
        Then Help link should open.
        And help url should end with 'course_components/libraries.html#export-a-library'
        """
        # The href we want to see in anchor help element.
        href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
               'latest/course_components/libraries.html#export-a-library'
        # Assert that help link is correct.
        assert_nav_help_link(
            test=self,
            page=self.library_export_page,
            href=href
        )

    def test_library_export_side_bar_help(self):
        """
        Scenario: Help link in sidebar links is working on Library export page.
        Given that I am on the Library export page.
        And I want help about the process
        And I click the 'Learn more about exporting a library' in the sidebar links
        Then Help link should open.
        And help url should end with 'course_components/libraries.html#export-a-library'
        """
        # The href we want to see in anchor help element.
        href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
               'latest/course_components/libraries.html#export-a-library'
        # Assert that help link is correct.
        assert_side_bar_help_link(
            test=self,
            page=self.library_export_page,
            href=href,
            help_text='Learn more about exporting a library'
        )
class NewProgramHelpTest(ProgramsConfigMixin, AcceptanceTest):
    """
    Verifies help links on the 'New Program' page.
    """
    def setUp(self):
        super(NewProgramHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.program_page = DashboardPageWithPrograms(self.browser)
        self.auth_page.visit()
        # The programs UI is behind a feature switch; enable it first.
        self.set_programs_api_configuration(True)
        self.program_page.visit()

    def test_program_create_nav_help(self):
        """
        The nav-bar 'Help' link on the 'New Program' page must point at the
        documentation index page ('index.html').
        """
        self.program_page.click_new_program_button()
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course'
            '/en/latest/index.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.program_page,
            href=expected_help_url,
        )
class CourseOutlineHelpTest(StudioCourseTest):
    """
    Verifies help links on the course outline page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(CourseOutlineHelpTest, self).setUp()
        info = self.course_info
        self.course_outline_page = CourseOutlinePage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_outline_page.visit()

    @skip("This scenario depends upon TNL-5460")
    def test_course_outline_nav_help(self):
        """
        The nav-bar 'Help' link on the Course Outline page must point at
        the 'developing_course/course_outline.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course'
            '/en/latest/developing_course/course_outline.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_outline_page,
            href=expected_help_url
        )

    def test_course_outline_side_bar_help(self):
        """
        The 'Learn more about the course outline' sidebar link on the
        Course Outline page must point at the
        'developing_course/course_outline.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course'
            '/en/latest/developing_course/course_outline.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.course_outline_page,
            href=expected_help_url,
            help_text='Learn more about the course outline',
            index=0
        )
class CourseUpdateHelpTest(StudioCourseTest):
    """
    Verifies help links on the 'Course Update' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(CourseUpdateHelpTest, self).setUp()
        info = self.course_info
        self.course_update_page = CourseUpdatesPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_update_page.visit()

    def test_course_update_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Course Update' page must point at
        the 'course_assets/handouts_updates.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/course_assets/handouts_updates.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_update_page,
            href=expected_help_url
        )
class AssetIndexHelpTest(StudioCourseTest):
    """
    Verifies help links on the course 'Files &amp; Uploads' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(AssetIndexHelpTest, self).setUp()
        info = self.course_info
        self.course_asset_index_page = AssetIndexPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_asset_index_page.visit()

    def test_asset_index_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Files &amp; Uploads' page must point
        at the 'course_assets/course_files.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/course_assets/course_files.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_asset_index_page,
            href=expected_help_url
        )

    def test_asset_index_side_bar_help(self):
        """
        The 'Learn more about managing files' sidebar link on the
        'Files &amp; Uploads' page must point at the
        'course_assets/course_files.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/course_assets/course_files.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.course_asset_index_page,
            href=expected_help_url,
            help_text='Learn more about managing files'
        )
class CoursePagesHelpTest(StudioCourseTest):
    """
    Verifies help links on the course 'Pages' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(CoursePagesHelpTest, self).setUp()
        info = self.course_info
        self.course_pages_page = PagesPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_pages_page.visit()

    def test_course_page_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Pages' page must point at the
        'course_assets/pages.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/course_assets/pages.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_pages_page,
            href=expected_help_url
        )
class UploadTextbookHelpTest(StudioCourseTest):
    """
    Verifies help links on the course 'Textbooks' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(UploadTextbookHelpTest, self).setUp()
        info = self.course_info
        self.course_textbook_upload_page = TextbookUploadPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_textbook_upload_page.visit()

    def test_course_textbook_upload_nav_help(self):
        """
        The nav-bar 'Help' link on the 'Textbooks' page must point at the
        'course_assets/textbooks.html' documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course'
            '/en/latest/course_assets/textbooks.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_textbook_upload_page,
            href=expected_help_url
        )

    def test_course_textbook_side_bar_help(self):
        """
        The 'Learn more about textbooks' sidebar link on the 'Textbooks'
        page must point at the 'course_assets/textbooks.html'
        documentation page.
        """
        # Documentation URL the help anchor is expected to carry.
        expected_help_url = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course'
            '/en/latest/course_assets/textbooks.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.course_textbook_upload_page,
            href=expected_help_url,
            help_text='Learn more about textbooks'
        )
class StudioUnitHelpTest(ContainerBase):
    """
    Help-link tests for the Studio Unit page.
    """

    def setUp(self, is_staff=True):
        super(StudioUnitHelpTest, self).setUp(is_staff=is_staff)

    def populate_course_fixture(self, course_fixture):
        """
        Enable the 'split_test' advanced module and add one
        section/subsection/unit so a Unit page exists to visit.
        """
        course_fixture.add_advanced_settings(
            {u"advanced_modules": {"value": ["split_test"]}}
        )
        unit = XBlockFixtureDesc('vertical', 'Test Unit')
        subsection = XBlockFixtureDesc('sequential', 'Test Subsection').add_children(unit)
        section = XBlockFixtureDesc('chapter', 'Test Section').add_children(subsection)
        course_fixture.add_children(section)

    def test_unit_page_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Unit page must open
        the help page ending with 'developing_course/course_units.html'.
        """
        unit_page = self.go_to_unit_page()
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/developing_course/course_units.html'
        )
        assert_nav_help_link(
            test=self,
            page=unit_page,
            href=expected_href,
        )
class SettingsHelpTest(StudioCourseTest):
    """
    Help-link tests for the Schedule and Details settings page.
    """

    def setUp(self, is_staff=False, test_xss=True):
        super(SettingsHelpTest, self).setUp()
        info = self.course_info
        self.settings_page = SettingsPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.settings_page.visit()

    def test_settings_page_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Settings page must
        open the help page ending with
        'set_up_course/setting_up_student_view.html'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/set_up_course/setting_up_student_view.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.settings_page,
            href=expected_href,
        )
class GradingPageHelpTest(StudioCourseTest):
    """
    Help-link tests for the Grading settings page.
    """

    def setUp(self, is_staff=False, test_xss=True):
        super(GradingPageHelpTest, self).setUp()
        info = self.course_info
        self.grading_page = GradingPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.grading_page.visit()

    def test_grading_page_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Grading page must
        open the help page ending with 'grading/index.html'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/grading/index.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.grading_page,
            href=expected_href,
        )
class CourseTeamSettingsHelpTest(StudioCourseTest):
    """
    Help-link tests for the Course Team settings page.
    """

    def setUp(self, is_staff=False, test_xss=True):
        super(CourseTeamSettingsHelpTest, self).setUp()
        info = self.course_info
        self.course_team_settings_page = CourseTeamPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_team_settings_page.visit()

    def test_course_course_team_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Course Team settings
        page must open the help page ending with
        'set_up_course/course_staffing.html#add-course-team-members'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/set_up_course/course_staffing.html#add-course-team-members'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_team_settings_page,
            href=expected_href,
        )
class CourseGroupConfigurationHelpTest(StudioCourseTest):
    """
    Help-link tests for the Group Configurations settings page.
    """

    def setUp(self, is_staff=False, test_xss=True):
        super(CourseGroupConfigurationHelpTest, self).setUp()
        info = self.course_info
        self.course_group_configuration_page = GroupConfigurationsPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.course_group_configuration_page.visit()

    def test_course_group_conf_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Group Configurations
        settings page must open the help page ending with 'index.html'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/index.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.course_group_configuration_page,
            href=expected_href,
        )

    def test_course_group_conf_content_group_side_bar_help(self):
        """
        The 'Learn More' sidebar link under 'content group' on the Group
        Configurations settings page must open the help page ending with
        'course_features/cohorts/cohorted_courseware.html'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/course_features/cohorts/cohorted_courseware.html'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.course_group_configuration_page,
            href=expected_href,
            help_text='Learn More',
        )
class AdvancedSettingHelpTest(StudioCourseTest):
    """
    Help-link tests for the course Advanced Settings page.
    """

    def setUp(self, is_staff=False, test_xss=True):
        super(AdvancedSettingHelpTest, self).setUp()
        info = self.course_info
        self.advanced_settings = AdvancedSettingsPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.advanced_settings.visit()

    def test_advanced_settings_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Advanced Settings
        page must open the help page ending with 'index.html'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/index.html'
        )
        assert_nav_help_link(
            test=self,
            page=self.advanced_settings,
            href=expected_href,
        )
class CertificatePageHelpTest(StudioCourseTest):
    """
    Help-link tests for the Certificate settings page.
    """

    # Both tests point at the same help document.
    EXPECTED_HREF = (
        'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
        'en/latest/set_up_course/creating_course_certificates.html'
    )

    def setUp(self, is_staff=False, test_xss=True):
        super(CertificatePageHelpTest, self).setUp()
        info = self.course_info
        self.certificates_page = CertificatesPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.certificates_page.visit()

    def test_certificate_page_nav_help(self):
        """
        The 'Help' link in the navigation bar on the Certificate settings
        page must open the help page ending with
        'set_up_course/creating_course_certificates.html'.
        """
        assert_nav_help_link(
            test=self,
            page=self.certificates_page,
            href=self.EXPECTED_HREF,
        )

    def test_certificate_page_side_bar_help(self):
        """
        The 'Learn more about certificates' sidebar link on the
        Certificate settings page must open the help page ending with
        'set_up_course/creating_course_certificates.html'.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.certificates_page,
            href=self.EXPECTED_HREF,
            help_text='Learn more about certificates',
        )
class GroupExperimentConfigurationHelpTest(ContainerBase):
    """
    Help-link tests for the Experiment Group Configurations section of
    the Group Configurations settings page.
    """

    def setUp(self):  # pylint: disable=arguments-differ
        super(GroupExperimentConfigurationHelpTest, self).setUp()
        info = self.course_info
        self.group_configuration_page = GroupConfigurationsPage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.group_configuration_page.visit()

    def populate_course_fixture(self, course_fixture):
        """
        Enable the 'split_test' advanced module so that the Experiment
        Group Configurations section is shown.
        """
        course_fixture.add_advanced_settings(
            {u"advanced_modules": {"value": ["split_test"]}}
        )

    def test_course_group_configuration_experiment_side_bar_help(self):
        """
        The 'Learn More' sidebar link under 'Experiment Group
        Configurations' must open the help page ending with
        'content_experiments_configure.html#set-up-group-configurations-in-edx-studio'.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/course_features'
            '/content_experiments/content_experiments_configure.html#set-up-group-configurations-in-edx-studio'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.group_configuration_page,
            href=expected_href,
            help_text='Learn More',
        )
class ToolsImportHelpTest(StudioCourseTest):
    """
    Help-link tests for the course import tools page.
    """

    # Both tests point at the same help document.
    EXPECTED_HREF = (
        'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
        'en/latest/releasing_course/export_import_course.html#import-a-course'
    )

    def setUp(self, is_staff=False, test_xss=True):
        super(ToolsImportHelpTest, self).setUp()
        info = self.course_info
        self.import_page = ImportCoursePage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.import_page.visit()

    def test_tools_import_nav_help(self):
        """
        The 'Help' link in the navigation bar on the import page must
        open the help page ending with
        'releasing_course/export_import_course.html#import-a-course'.
        """
        assert_nav_help_link(
            test=self,
            page=self.import_page,
            href=self.EXPECTED_HREF,
        )

    def test_tools_import_side_bar_help(self):
        """
        The 'Learn more about importing a course' sidebar link on the
        import page must open the help page ending with
        'releasing_course/export_import_course.html#import-a-course'.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.import_page,
            href=self.EXPECTED_HREF,
            help_text='Learn more about importing a course',
        )
class ToolsExportHelpTest(StudioCourseTest):
    """
    Help-link tests for the course export tools page.
    """

    # Both tests point at the same help document.
    EXPECTED_HREF = (
        'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
        'en/latest/releasing_course/export_import_course.html#export-a-course'
    )

    def setUp(self, is_staff=False, test_xss=True):
        super(ToolsExportHelpTest, self).setUp()
        info = self.course_info
        self.export_page = ExportCoursePage(
            self.browser, info['org'], info['number'], info['run']
        )
        self.export_page.visit()

    # Renamed from test_tools_import_nav_help: the original names were
    # copy-pasted from ToolsImportHelpTest and described the wrong page.
    def test_tools_export_nav_help(self):
        """
        The 'Help' link in the navigation bar on the export page must
        open the help page ending with
        'releasing_course/export_import_course.html#export-a-course'.
        """
        assert_nav_help_link(
            test=self,
            page=self.export_page,
            href=self.EXPECTED_HREF,
        )

    def test_tools_export_side_bar_help(self):
        """
        The 'Learn more about exporting a course' sidebar link on the
        export page must open the help page ending with
        'releasing_course/export_import_course.html#export-a-course'.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.export_page,
            href=self.EXPECTED_HREF,
            help_text='Learn more about exporting a course',
        )
class StudioWelcomeHelpTest(AcceptanceTest):
    """
    Help-link test for the 'Welcome' page (user not logged in).
    """

    def setUp(self):
        super(StudioWelcomeHelpTest, self).setUp()
        self.index_page = IndexPage(self.browser)
        self.index_page.visit()

    def test_welcome_nav_help(self):
        """
        The 'Help' link in the navigation bar on the 'Welcome' page must
        open the help page containing 'getting_started/get_started.html'
        even though no user is signed in.
        """
        expected_href = (
            'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/'
            'en/latest/getting_started/get_started.html'
        )
        # The user is anonymous in this scenario, hence signed_in=False.
        assert_nav_help_link(
            test=self,
            page=self.index_page,
            href=expected_href,
            signed_in=False,
        )
|
jzoldak/edx-platform
|
common/test/acceptance/tests/studio/test_studio_help.py
|
Python
|
agpl-3.0
| 48,761
|
[
"VisIt"
] |
322224003833c3d35f128639b7c0debde92248014a9e4cb0f7a9f0ebacb929c3
|
# Copyright 2008-2013 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from robot.model import TotalStatisticsBuilder, Criticality
from robot import model, utils
from .configurer import SuiteConfigurer
from .messagefilter import MessageFilter
from .keywordremover import KeywordRemover
from .keyword import Keyword
from .suiteteardownfailed import (SuiteTeardownFailureHandler,
SuiteTeardownFailed)
from .testcase import TestCase
class TestSuite(model.TestSuite):
    """Result of a single test suite."""
    __slots__ = ['message', 'starttime', 'endtime', '_criticality']
    test_class = TestCase
    keyword_class = Keyword

    def __init__(self, name='', doc='', metadata=None, source=None,
                 message='', starttime=None, endtime=None):
        model.TestSuite.__init__(self, name, doc, metadata, source)
        #: Error message produced by suite setup/teardown, if any.
        self.message = message
        #: Execution start time as a ``%Y%m%d %H:%M:%S.%f`` string.
        self.starttime = starttime
        #: Execution end time as a ``%Y%m%d %H:%M:%S.%f`` string.
        self.endtime = endtime
        self._criticality = None

    @property
    def passed(self):
        """``True`` if all critical tests succeeded, ``False`` otherwise."""
        return not self.statistics.critical.failed

    @property
    def status(self):
        """``'PASS'`` if all critical tests succeeded, ``'FAIL'`` otherwise."""
        if self.passed:
            return 'PASS'
        return 'FAIL'

    @property
    def statistics(self):
        """Suite statistics as a :class:`~robot.model.totalstatistics.TotalStatistics` object.

        Built anew on every access, so bind it to a local name when you
        need more than one field::

            stats = suite.statistics
            print stats.critical.failed
            print stats.all.total
            print stats.message
        """
        return TotalStatisticsBuilder(self).stats

    @property
    def full_message(self):
        """Combination of :attr:`message` and :attr:`stat_message`."""
        if self.message:
            return '%s\n\n%s' % (self.message, self.stat_message)
        return self.stat_message

    @property
    def stat_message(self):
        """String representation of the suite's :attr:`statistics`."""
        return self.statistics.message

    @property
    def elapsedtime(self):
        """Total execution time of the suite in milliseconds."""
        # Prefer the recorded wall-clock interval; otherwise sum children.
        if self.starttime and self.endtime:
            return utils.get_elapsed_time(self.starttime, self.endtime)
        children = chain(self.suites, self.tests, self.keywords)
        return sum(item.elapsedtime for item in children)

    @property
    def criticality(self):
        """Used by tests to determine are they considered critical or not.

        Set using :meth:`set_criticality`.
        """
        # Criticality lives on the root suite; delegate upwards.
        if self.parent:
            return self.parent.criticality
        if self._criticality is None:
            self.set_criticality()
        return self._criticality

    def set_criticality(self, critical_tags=None, non_critical_tags=None):
        """Sets which tags are considered critical and which non-critical.

        Tags can be given as lists of strings or, when giving only one,
        as single strings. Criticality can be set only to the top level
        test suite.
        """
        if self.parent:
            raise TypeError('Criticality can only be set to top level suite')
        self._criticality = Criticality(critical_tags, non_critical_tags)

    def remove_keywords(self, how):
        """Remove keywords based on the given condition.

        :param how: One of ``ALL``, ``PASSED``, ``FOR``, or ``WUKS``,
            with the same semantics as the ``--removekeywords`` command
            line option.
        """
        self.visit(KeywordRemover(how))

    def filter_messages(self, log_level='TRACE'):
        """Remove log messages below the specified ``log_level``."""
        self.visit(MessageFilter(log_level))

    def configure(self, **options):
        """A shortcut to configure a suite using one method call.

        :param options: Passed to
            :class:`~robot.result.configurer.SuiteConfigurer`, which
            then calls :meth:`filter`, :meth:`remove_keywords`, etc.

        Example::

            suite.configure(remove_keywords='PASSED',
                            critical_tags='smoke',
                            doc='Smoke test results.')
        """
        self.visit(SuiteConfigurer(**options))

    def handle_suite_teardown_failures(self):
        """Internal usage only."""
        self.visit(SuiteTeardownFailureHandler())

    def suite_teardown_failed(self, message):
        """Internal usage only."""
        self.visit(SuiteTeardownFailed(message))
|
ktan2020/legacy-automation
|
win/Lib/site-packages/robot/result/testsuite.py
|
Python
|
mit
| 5,583
|
[
"VisIt"
] |
dd4d940729e57a25437a50472b8c3b9ad2546be95f774c0caf07da6d45c4c87a
|
## Aditya Gilra, NCBS, Bangalore, 2012
## Dilawar Singh, NCBS, 2015

import os
# NOTE(review): presumably caps thread usage of a numeric backend that
# reads NUMPTHREADS — confirm which library consumes this variable.
os.environ['NUMPTHREADS'] = '1'
import sys
# Make the in-tree moose python package importable when run from source.
sys.path.append('../../../python/')
import moose
from moose.utils import *
import moose.utils as mu
import count
import numpy as np
from moose.neuroml.NeuroML import NeuroML
import unittest

# Simulation timestep, plotting timestep, and total run duration (seconds).
simdt = 10e-6 # s
plotdt = 10e-6 # s
runtime = 0.19 # s
def loadGran98NeuroML_L123(filename):
    """Load a NeuroML network, simulate it, and return the soma spike count.

    Reads the network from ``filename``, records Vm on the CA1 soma
    compartment, runs the simulation for the module-level ``runtime``,
    and returns the number of spikes found in the recorded trace.
    """
    neuromlR = NeuroML()
    populationDict, projectionDict = \
        neuromlR.readNeuroMLFromFile(filename)
    # First cell of the 'CA1group' population; record Vm on its soma segment.
    soma_path = populationDict['CA1group'][1][0].path+'/Seg0_soma_0_0'
    somaVm = setupTable('somaVm',moose.Compartment(soma_path),'Vm')
    # NOTE(review): `soma` and `tvec` below are never used afterwards —
    # candidates for removal if wrapping the path has no side effect.
    soma = moose.Compartment(soma_path)
    moose.reinit()
    moose.start(runtime)
    tvec = np.arange(0.0,runtime,simdt)
    res = count.spike_train_simple_stat( somaVm.vector )
    return res['number of spikes']
if __name__ == "__main__":
    # Use the network file named on the command line, or a default one.
    filename = sys.argv[1] if len(sys.argv) >= 2 else "CA1soma.net.xml"
    loadGran98NeuroML_L123(filename)
|
dilawar/moose-full
|
moose-core/tests/python/neuroml/CA1.py
|
Python
|
gpl-2.0
| 1,046
|
[
"MOOSE"
] |
6b7754ded773df0b46639c228db65ecf9fe9e4b214531103494c6fa310a9c9be
|
from PyQt4.Qt import *
import Avogadro
class Tool(QObject):
    """Skeleton Avogadro tool plugin: every event hook is a no-op stub.

    Each handler receives the GLWidget (and, where applicable, the Qt
    event) and returns None without accepting the event.
    """

    def __init__(self):
        QObject.__init__(self)

    def paint(self, widget):
        """Draw nothing on the given GLWidget."""
        return None

    def mousePressEvent(self, widget, mouseEvent):
        """Ignore mouse-press events (event is not accepted)."""
        return None

    def mouseMoveEvent(self, widget, mouseEvent):
        """Ignore mouse-move events (event is not accepted)."""
        return None

    def mouseReleaseEvent(self, widget, mouseEvent):
        """Ignore mouse-release events (event is not accepted)."""
        return None

    def wheelEvent(self, widget, wheelEvent):
        """Ignore wheel events (event is not accepted)."""
        return None
|
rcplane/periodicdisplay
|
reference/avogadro/libavogadro/examples/python/tooltemplate.py
|
Python
|
gpl-2.0
| 902
|
[
"Avogadro"
] |
f2c1834810f244495def2a16feef03166f5be584a42808dd843b89df7695093f
|
# ./sdf_model.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:e92452c8d3e28a9e27abfc9994d2007779e7f4c9
# Generated 2016-09-25 19:01:05.718182 by PyXB version 1.2.5 using Python 3.5.1.final.0
# Namespace AbsentNamespace0
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:a086fa40-8341-11e6-9413-a434d9cb994f')

# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.5'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)

# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
# (Fixed: a stray '<' after the call made this line a syntax error.)
_module_typeBindings = pyxb.utils.utility.Object()

# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes

# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.CreateAbsentNamespace()
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument(xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser. You might pass the URI from which
    the document was obtained.
    """
    # When the SAX style is not selected, fall back to DOM-based parsing.
    if pyxb._XMLStyle != pyxb.XMLStyle_saxer:
        dom_tree = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom_tree.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    parser = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace,
                                            location_base=location_base)
    content_handler = parser.getContentHandler()
    # The SAX parser consumes bytes; encode text input first.
    payload = xml_text
    if isinstance(payload, _six.text_type):
        payload = payload.encode(pyxb._InputEncoding)
    parser.parse(io.BytesIO(payload))
    return content_handler.rootObject()
def CreateFromDOM(node, default_namespace=None):
    """Create a Python instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    ns = default_namespace
    if ns is None:
        ns = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
# Atomic simple type: vector3
class vector3(pyxb.binding.datatypes.string):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'vector3' type.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'vector3')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 3, 2)
    _Documentation = None
# Restrict lexical values to three whitespace-separated signed decimal or
# scientific-notation numbers.
vector3._CF_pattern = pyxb.binding.facets.CF_pattern()
vector3._CF_pattern.addPattern(
    pattern='(\\s*(-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+)\\s+){2}((-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+))\\s*')
vector3._InitializeFacetMap(vector3._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'vector3', vector3)
_module_typeBindings.vector3 = vector3
# Atomic simple type: quaternion
class quaternion(pyxb.binding.datatypes.string):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'quaternion' type.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'quaternion')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 9, 2)
    _Documentation = None
# Restrict lexical values to four whitespace-separated signed decimal or
# scientific-notation numbers.
quaternion._CF_pattern = pyxb.binding.facets.CF_pattern()
quaternion._CF_pattern.addPattern(
    pattern='(\\s*(-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+)\\s+){3}((-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+))\\s*')
quaternion._InitializeFacetMap(quaternion._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'quaternion', quaternion)
_module_typeBindings.quaternion = quaternion
# Atomic simple type: vector2d
class vector2d(pyxb.binding.datatypes.string):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'vector2d' type.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'vector2d')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 15, 2)
    _Documentation = None
# Restrict lexical values to two whitespace-separated signed decimal or
# scientific-notation numbers.
vector2d._CF_pattern = pyxb.binding.facets.CF_pattern()
vector2d._CF_pattern.addPattern(
    pattern='(\\s*(-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+)\\s+)((-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+))\\s*')
vector2d._InitializeFacetMap(vector2d._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'vector2d', vector2d)
_module_typeBindings.vector2d = vector2d
# Atomic simple type: vector2i
class vector2i(pyxb.binding.datatypes.string):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'vector2i' type.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'vector2i')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 21, 2)
    _Documentation = None
# Restrict lexical values to two whitespace-separated signed integers.
vector2i._CF_pattern = pyxb.binding.facets.CF_pattern()
vector2i._CF_pattern.addPattern(pattern='\\s*(-|\\+)?\\d+\\s+(-|\\+)?\\d+\\s*')
vector2i._InitializeFacetMap(vector2i._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'vector2i', vector2i)
_module_typeBindings.vector2i = vector2i
# Atomic simple type: pose
class pose(pyxb.binding.datatypes.string):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'pose' type.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'pose')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 27, 2)
    _Documentation = None
# Restrict lexical values to six whitespace-separated signed decimal or
# scientific-notation numbers.
pose._CF_pattern = pyxb.binding.facets.CF_pattern()
pose._CF_pattern.addPattern(
    pattern='(\\s*(-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+)\\s+){5}((-|\\+)?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+))\\s*')
pose._InitializeFacetMap(pose._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'pose', pose)
_module_typeBindings.pose = pose
# Atomic simple type: time
class time(pyxb.binding.datatypes.double):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'time' type; unrestricted double.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'time')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 33, 2)
    _Documentation = None
time._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'time', time)
_module_typeBindings.time = time
# Atomic simple type: color
class color(pyxb.binding.datatypes.string):
    """An atomic simple type."""
    # PyXB-generated binding for the SDF 'color' type.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'color')
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/types.xsd', 38, 2)
    _Documentation = None
# Restrict lexical values to four whitespace-separated non-negative decimal
# or scientific-notation numbers (only an optional '+' sign is allowed).
color._CF_pattern = pyxb.binding.facets.CF_pattern()
color._CF_pattern.addPattern(
    pattern='(\\s*\\+?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+)\\s+){3}\\+?(\\d+(\\.\\d*)?|\\.\\d+|\\d+\\.\\d+[eE][-\\+]?[0-9]+)\\s*')
color._InitializeFacetMap(color._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'color', color)
_module_typeBindings.color = color
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding. Judging by its children (static,
# self_collide, allow_auto_disable, pose, include, gripper, joint, link,
# plugin) and the sdf_model.xsd location, this is the SDF <model> element's
# anonymous type -- confirm against the schema. The double-underscore
# element declarations rely on Python name mangling; do not rename them.
class CTD_ANON(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location(
        '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd', 14, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element static uses Python identifier static
    __static = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'static'), 'static',
                                                       '__AbsentNamespace0_CTD_ANON_static', True,
                                                       pyxb.utils.utility.Location(
                                                           '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                           17, 8), )
    static = property(__static.value, __static.set, None,
                      '\n        If set to true, the model is immovable. Otherwise the model is simulated in the dynamics engine.\n      ')
    # Element self_collide uses Python identifier self_collide
    __self_collide = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'self_collide'),
                                                             'self_collide', '__AbsentNamespace0_CTD_ANON_self_collide',
                                                             True, pyxb.utils.utility.Location(
            '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd', 26, 8), )
    self_collide = property(__self_collide.value, __self_collide.set, None,
                            '\n        If set to true, all links in the model will collide with each other (except those connected by a joint). Can be overridden by the link or collision element self_collide property. Two links within a model will collide if link1.self_collide OR link2.self_collide. Links connected by a joint will never collide.\n      ')
    # Element allow_auto_disable uses Python identifier allow_auto_disable
    __allow_auto_disable = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'allow_auto_disable'), 'allow_auto_disable',
        '__AbsentNamespace0_CTD_ANON_allow_auto_disable', True,
        pyxb.utils.utility.Location('/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                    35, 8), )
    allow_auto_disable = property(__allow_auto_disable.value, __allow_auto_disable.set, None,
                                  '\n        Allows a model to auto-disable, which is means the physics engine can skip updating the model when the model is at rest. This parameter is only used by models with no joints.\n      ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
                                                     '__AbsentNamespace0_CTD_ANON_pose', True,
                                                     pyxb.utils.utility.Location(
                                                         '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                         44, 8), )
    pose = property(__pose.value, __pose.set, None,
                    '\n        A position and orientation in the global coordinate frame for the model. Position(x,y,z) and rotation (roll, pitch yaw) in the global coordinate frame.\n      ')
    # Element include uses Python identifier include
    __include = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'include'), 'include',
                                                        '__AbsentNamespace0_CTD_ANON_include', True,
                                                        pyxb.utils.utility.Location(
                                                            '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                            53, 8), )
    include = property(__include.value, __include.set, None,
                       '\n        Include resources from a URI. This can be used to nest models.\n      ')
    # Element gripper uses Python identifier gripper
    # NOTE(review): gripper/joint/link/plugin below are namespace-qualified
    # (ExpandedName(Namespace, ...)), unlike the unqualified elements above.
    __gripper = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gripper'), 'gripper',
                                                        '__AbsentNamespace0_CTD_ANON_gripper', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/gripper.xsd', 4, 2), )
    gripper = property(__gripper.value, __gripper.set, None, None)
    # Element joint uses Python identifier joint
    __joint = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'joint'), 'joint',
                                                      '__AbsentNamespace0_CTD_ANON_joint', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 10, 2), )
    joint = property(__joint.value, __joint.set, None, None)
    # Element link uses Python identifier link
    __link = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'link'), 'link',
                                                     '__AbsentNamespace0_CTD_ANON_link', True,
                                                     pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd',
                                                                                 16, 2), )
    link = property(__link.value, __link.set, None, None)
    # Element plugin uses Python identifier plugin
    __plugin = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'plugin'), 'plugin',
                                                       '__AbsentNamespace0_CTD_ANON_plugin', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/plugin.xsd', 9, 2), )
    plugin = property(__plugin.value, __plugin.set, None, None)
    # Attribute name uses Python identifier name
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
                                               '__AbsentNamespace0_CTD_ANON_name', pyxb.binding.datatypes.string,
                                               required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location(
        '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd', 106, 6)
    __name._UseLocation = pyxb.utils.utility.Location(
        '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd', 106, 6)
    name = property(__name.value, __name.set, None,
                    '\n        A unique name for the model. This name must not match another model in the world.\n      ')
    # Register every element/attribute declaration with the content model.
    _ElementMap.update({
        __static.name(): __static,
        __self_collide.name(): __self_collide,
        __allow_auto_disable.name(): __allow_auto_disable,
        __pose.name(): __pose,
        __include.name(): __include,
        __gripper.name(): __gripper,
        __joint.name(): __joint,
        __link.name(): __link,
        __plugin.name(): __plugin
    })
    _AttributeMap.update({
        __name.name(): __name
    })
_module_typeBindings.CTD_ANON = CTD_ANON
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the SDF <include> element's
# anonymous type (uri/pose/name/static children). Do not hand-edit.
class CTD_ANON_(pyxb.binding.basis.complexTypeDefinition):
    """
            Include resources from a URI. This can be used to nest models.
          """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location(
        '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd', 59, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'uri'), 'uri',
                                                    '__AbsentNamespace0_CTD_ANON__uri', True,
                                                    pyxb.utils.utility.Location(
                                                        '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                        62, 14), )
    uri = property(__uri.value, __uri.set, None,
                   '\n                URI to a resource, such as a model\n              ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
                                                     '__AbsentNamespace0_CTD_ANON__pose', True,
                                                     pyxb.utils.utility.Location(
                                                         '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                         71, 14), )
    pose = property(__pose.value, __pose.set, None,
                    '\n                Override the pose of the included model. A position and orientation in the global coordinate frame for the model. Position(x,y,z) and rotation (roll, pitch yaw) in the global coordinate frame.\n              ')
    # Element name uses Python identifier name
    __name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'name'), 'name',
                                                     '__AbsentNamespace0_CTD_ANON__name', True,
                                                     pyxb.utils.utility.Location(
                                                         '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                         80, 14), )
    name = property(__name.value, __name.set, None,
                    '\n                Override the name of the included model.\n              ')
    # Element static uses Python identifier static
    __static = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'static'), 'static',
                                                       '__AbsentNamespace0_CTD_ANON__static', True,
                                                       pyxb.utils.utility.Location(
                                                           '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
                                                           89, 14), )
    static = property(__static.value, __static.set, None,
                      '\n                Override the static value of the included model.\n              ')
    _ElementMap.update({
        __uri.name(): __uri,
        __pose.name(): __pose,
        __name.name(): __name,
        __static.name(): __static
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_ = CTD_ANON_
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the altimeter sensor type
# (altimeter.xsd): vertical position/velocity noise containers.
class CTD_ANON_2(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element vertical_position uses Python identifier vertical_position
    __vertical_position = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'vertical_position'), 'vertical_position',
        '__AbsentNamespace0_CTD_ANON_2_vertical_position', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 13, 8), )
    vertical_position = property(__vertical_position.value, __vertical_position.set, None,
                                 '\n        \n          Noise parameters for vertical position\n        \n      ')
    # Element vertical_velocity uses Python identifier vertical_velocity
    __vertical_velocity = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'vertical_velocity'), 'vertical_velocity',
        '__AbsentNamespace0_CTD_ANON_2_vertical_velocity', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 28, 8), )
    vertical_velocity = property(__vertical_velocity.value, __vertical_velocity.set, None,
                                 '\n        \n          Noise parameters for vertical velocity\n        \n      ')
    _ElementMap.update({
        __vertical_position.name(): __vertical_position,
        __vertical_velocity.name(): __vertical_velocity
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_2 = CTD_ANON_2
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): placeholder type for the altimeter vertical-position noise
# element; its content model is empty here (no elements or attributes).
class CTD_ANON_3(pyxb.binding.basis.complexTypeDefinition):
    """
        
          Noise parameters for vertical position
        
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 21, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_3 = CTD_ANON_3
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): placeholder type for the altimeter vertical-velocity noise
# element; its content model is empty here (no elements or attributes).
class CTD_ANON_4(pyxb.binding.basis.complexTypeDefinition):
    """
        
          Noise parameters for vertical velocity
        
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 36, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_4 = CTD_ANON_4
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the audio_source element
# (audio_source.xsd): uri/pitch/gain/contact/loop/pose children.
class CTD_ANON_5(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'uri'), 'uri',
                                                    '__AbsentNamespace0_CTD_ANON_5_uri', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/audio_source.xsd', 13, 8), )
    uri = property(__uri.value, __uri.set, None, '\n        URI of the audio media.\n      ')
    # Element pitch uses Python identifier pitch
    __pitch = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pitch'), 'pitch',
                                                      '__AbsentNamespace0_CTD_ANON_5_pitch', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/audio_source.xsd', 22, 8), )
    pitch = property(__pitch.value, __pitch.set, None, '\n        Pitch for the audio media, in Hz\n      ')
    # Element gain uses Python identifier gain
    __gain = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'gain'), 'gain',
                                                     '__AbsentNamespace0_CTD_ANON_5_gain', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/audio_source.xsd', 31, 8), )
    gain = property(__gain.value, __gain.set, None, '\n        Gain for the audio media, in dB.\n      ')
    # Element contact uses Python identifier contact
    __contact = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'contact'), 'contact',
                                                        '__AbsentNamespace0_CTD_ANON_5_contact', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/audio_source.xsd', 40, 8), )
    contact = property(__contact.value, __contact.set, None,
                       '\n        List of collision objects that will trigger audio playback.\n      ')
    # Element loop uses Python identifier loop
    __loop = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'loop'), 'loop',
                                                     '__AbsentNamespace0_CTD_ANON_5_loop', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/audio_source.xsd', 62, 8), )
    loop = property(__loop.value, __loop.set, None,
                    '\n        True to make the audio source loop playback.\n      ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
                                                     '__AbsentNamespace0_CTD_ANON_5_pose', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/audio_source.xsd', 71, 8), )
    pose = property(__pose.value, __pose.set, None,
                    '\n        A position and orientation in the parent coordinate frame for the audio source. Position(x,y,z) and rotation (roll, pitch yaw) in the parent coordinate frame.\n      ')
    _ElementMap.update({
        __uri.name(): __uri,
        __pitch.name(): __pitch,
        __gain.name(): __gain,
        __contact.name(): __contact,
        __loop.name(): __loop,
        __pose.name(): __pose
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_5 = CTD_ANON_5
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the audio_source <contact>
# wrapper, which lists triggering collision elements.
class CTD_ANON_6(pyxb.binding.basis.complexTypeDefinition):
    """
        List of collision objects that will trigger audio playback.
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 46, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element collision uses Python identifier collision
    __collision = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'collision'), 'collision',
                                                          '__AbsentNamespace0_CTD_ANON_6_collision', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/audio_source.xsd', 49, 14), )
    collision = property(__collision.value, __collision.set, None,
                         '\n                Name of child collision element that will trigger audio playback.\n              ')
    _ElementMap.update({
        __collision.name(): __collision
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_6 = CTD_ANON_6
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the box shape geometry
# (box_shape.xsd): a single <size> child.
class CTD_ANON_7(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/box_shape.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element size uses Python identifier size
    __size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'size'), 'size',
                                                     '__AbsentNamespace0_CTD_ANON_7_size', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/box_shape.xsd', 13, 8), )
    size = property(__size.value, __size.set, None,
                    '\n        The three side lengths of the box. The origin of the box is in its geometric center (inside the center of the box).\n      ')
    _ElementMap.update({
        __size.name(): __size
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_7 = CTD_ANON_7
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera sensor type
# (camera.xsd): pose/horizontal_fov/image/clip/save/depth_camera/noise/
# distortion children plus an optional 'name' attribute.
class CTD_ANON_8(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
                                                     '__AbsentNamespace0_CTD_ANON_8_pose', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 13, 8), )
    pose = property(__pose.value, __pose.set, None,
                    '\n        A position and orientation in the parent coordinate frame for the camera.\n      ')
    # Element horizontal_fov uses Python identifier horizontal_fov
    __horizontal_fov = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'horizontal_fov'),
                                                               'horizontal_fov',
                                                               '__AbsentNamespace0_CTD_ANON_8_horizontal_fov', True,
                                                               pyxb.utils.utility.Location(
                                                                   'http://sdformat.org/schemas/camera.xsd', 22, 8), )
    horizontal_fov = property(__horizontal_fov.value, __horizontal_fov.set, None,
                              '\n        Horizontal field of view\n      ')
    # Element image uses Python identifier image
    __image = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'image'), 'image',
                                                      '__AbsentNamespace0_CTD_ANON_8_image', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/camera.xsd', 31, 8), )
    image = property(__image.value, __image.set, None,
                     '\n        The image size in pixels and format.\n      ')
    # Element clip uses Python identifier clip
    __clip = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'clip'), 'clip',
                                                     '__AbsentNamespace0_CTD_ANON_8_clip', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 71, 8), )
    clip = property(__clip.value, __clip.set, None,
                    '\n        The near and far clip planes. Objects closer or farther than these planes are not rendered.\n      ')
    # Element save uses Python identifier save
    __save = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'save'), 'save',
                                                     '__AbsentNamespace0_CTD_ANON_8_save', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 102, 8), )
    save = property(__save.value, __save.set, None,
                    '\n        Enable or disable saving of camera frames.\n      ')
    # Element depth_camera uses Python identifier depth_camera
    __depth_camera = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'depth_camera'),
                                                             'depth_camera',
                                                             '__AbsentNamespace0_CTD_ANON_8_depth_camera', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/camera.xsd', 131, 8), )
    depth_camera = property(__depth_camera.value, __depth_camera.set, None,
                            '\n        Depth camera parameters\n      ')
    # Element noise uses Python identifier noise
    __noise = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'noise'), 'noise',
                                                      '__AbsentNamespace0_CTD_ANON_8_noise', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/camera.xsd', 153, 8), )
    noise = property(__noise.value, __noise.set, None,
                     '\n        The properties of the noise model that should be applied to generated images\n      ')
    # Element distortion uses Python identifier distortion
    __distortion = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'distortion'),
                                                           'distortion', '__AbsentNamespace0_CTD_ANON_8_distortion',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/camera.xsd', 193, 8), )
    distortion = property(__distortion.value, __distortion.set, None,
                          '\n        Lens distortion to be applied to camera images. See http://en.wikipedia.org/wiki/Distortion_(optics)#Software_correction\n      ')
    # Attribute name uses Python identifier name
    # Optional attribute with a generated default value ('__default__').
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
                                               '__AbsentNamespace0_CTD_ANON_8_name', pyxb.binding.datatypes.string,
                                               unicode_default='__default__')
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 260, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 260, 6)
    name = property(__name.value, __name.set, None, '\n        An optional name for the camera.\n      ')
    _ElementMap.update({
        __pose.name(): __pose,
        __horizontal_fov.name(): __horizontal_fov,
        __image.name(): __image,
        __clip.name(): __clip,
        __save.name(): __save,
        __depth_camera.name(): __depth_camera,
        __noise.name(): __noise,
        __distortion.name(): __distortion
    })
    _AttributeMap.update({
        __name.name(): __name
    })
_module_typeBindings.CTD_ANON_8 = CTD_ANON_8
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera <image> element:
# width/height in pixels plus a pixel-format string.
class CTD_ANON_9(pyxb.binding.basis.complexTypeDefinition):
    """
        The image size in pixels and format.
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 37, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element width uses Python identifier width
    __width = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'width'), 'width',
                                                      '__AbsentNamespace0_CTD_ANON_9_width', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/camera.xsd', 40, 14), )
    width = property(__width.value, __width.set, None, '\n                Width in pixels\n              ')
    # Element height uses Python identifier height
    __height = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'height'), 'height',
                                                       '__AbsentNamespace0_CTD_ANON_9_height', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/camera.xsd', 49, 14), )
    height = property(__height.value, __height.set, None, '\n                Height in pixels \n              ')
    # Element format uses Python identifier format
    __format = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'format'), 'format',
                                                       '__AbsentNamespace0_CTD_ANON_9_format', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/camera.xsd', 58, 14), )
    format = property(__format.value, __format.set, None,
                      '\n                (L8|R8G8B8|B8G8R8|BAYER_RGGB8|BAYER_BGGR8|BAYER_GBRG8|BAYER_GRBG8)\n              ')
    _ElementMap.update({
        __width.name(): __width,
        __height.name(): __height,
        __format.name(): __format
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_9 = CTD_ANON_9
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera <clip> element:
# near/far clipping plane distances.
class CTD_ANON_10(pyxb.binding.basis.complexTypeDefinition):
    """
        The near and far clip planes. Objects closer or farther than these planes are not rendered.
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 77, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element near uses Python identifier near
    __near = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'near'), 'near',
                                                     '__AbsentNamespace0_CTD_ANON_10_near', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 80, 14), )
    near = property(__near.value, __near.set, None, '\n                Near clipping plane\n              ')
    # Element far uses Python identifier far
    __far = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'far'), 'far',
                                                    '__AbsentNamespace0_CTD_ANON_10_far', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/camera.xsd', 89, 14), )
    far = property(__far.value, __far.set, None, '\n                Far clipping plane\n              ')
    _ElementMap.update({
        __near.name(): __near,
        __far.name(): __far
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_10 = CTD_ANON_10
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera <save> element:
# a <path> child plus a required boolean 'enabled' attribute.
class CTD_ANON_11(pyxb.binding.basis.complexTypeDefinition):
    """
        Enable or disable saving of camera frames.
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 108, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element path uses Python identifier path
    __path = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'path'), 'path',
                                                     '__AbsentNamespace0_CTD_ANON_11_path', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 111, 14), )
    path = property(__path.value, __path.set, None,
                    '\n                The path name which will hold the frame data. If path name is relative, then directory is relative to current working directory.\n              ')
    # Attribute enabled uses Python identifier enabled
    __enabled = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'enabled'), 'enabled',
                                                  '__AbsentNamespace0_CTD_ANON_11_enabled',
                                                  pyxb.binding.datatypes.boolean, required=True)
    __enabled._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 120, 12)
    __enabled._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 120, 12)
    enabled = property(__enabled.value, __enabled.set, None,
                       '\n              True = saving enabled\n            ')
    _ElementMap.update({
        __path.name(): __path
    })
    _AttributeMap.update({
        __enabled.name(): __enabled
    })
_module_typeBindings.CTD_ANON_11 = CTD_ANON_11
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera <depth_camera>
# element: a single <output> child.
class CTD_ANON_12(pyxb.binding.basis.complexTypeDefinition):
    """
        Depth camera parameters
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 137, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element output uses Python identifier output
    __output = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'output'), 'output',
                                                       '__AbsentNamespace0_CTD_ANON_12_output', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/camera.xsd', 140, 14), )
    output = property(__output.value, __output.set, None, '\n                Type of output\n              ')
    _ElementMap.update({
        __output.name(): __output
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_12 = CTD_ANON_12
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera <noise> element:
# a noise type name plus Gaussian mean/stddev parameters.
class CTD_ANON_13(pyxb.binding.basis.complexTypeDefinition):
    """
        The properties of the noise model that should be applied to generated images
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 159, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element type uses Python identifier type
    # (intentionally shadows the builtin 'type' within instances; generated)
    __type = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'type'), 'type',
                                                     '__AbsentNamespace0_CTD_ANON_13_type', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 162, 14), )
    type = property(__type.value, __type.set, None,
                    '\n                The type of noise.  Currently supported types are: "gaussian" (draw additive noise values independently for each pixel from a Gaussian distribution).\n              ')
    # Element mean uses Python identifier mean
    __mean = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mean'), 'mean',
                                                     '__AbsentNamespace0_CTD_ANON_13_mean', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/camera.xsd', 171, 14), )
    mean = property(__mean.value, __mean.set, None,
                    '\n                For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n              ')
    # Element stddev uses Python identifier stddev
    __stddev = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stddev'), 'stddev',
                                                       '__AbsentNamespace0_CTD_ANON_13_stddev', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/camera.xsd', 180, 14), )
    stddev = property(__stddev.value, __stddev.set, None,
                      '\n                For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n              ')
    _ElementMap.update({
        __type.name(): __type,
        __mean.name(): __mean,
        __stddev.name(): __stddev
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_13 = CTD_ANON_13
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding for the camera <distortion> element:
# radial (k1,k2,k3) and tangential (p1,p2) coefficients plus the principal
# point (center).
class CTD_ANON_14(pyxb.binding.basis.complexTypeDefinition):
    """
        Lens distortion to be applied to camera images. See http://en.wikipedia.org/wiki/Distortion_(optics)#Software_correction
      """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 199, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element k1 uses Python identifier k1
    __k1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'k1'), 'k1',
                                                   '__AbsentNamespace0_CTD_ANON_14_k1', True,
                                                   pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd',
                                                                               202, 14), )
    k1 = property(__k1.value, __k1.set, None,
                  '\n                The radial distortion coefficient k1\n              ')
    # Element k2 uses Python identifier k2
    __k2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'k2'), 'k2',
                                                   '__AbsentNamespace0_CTD_ANON_14_k2', True,
                                                   pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd',
                                                                               211, 14), )
    k2 = property(__k2.value, __k2.set, None,
                  '\n                The radial distortion coefficient k2\n              ')
    # Element k3 uses Python identifier k3
    __k3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'k3'), 'k3',
                                                   '__AbsentNamespace0_CTD_ANON_14_k3', True,
                                                   pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd',
                                                                               220, 14), )
    k3 = property(__k3.value, __k3.set, None,
                  '\n                The radial distortion coefficient k3\n              ')
    # Element p1 uses Python identifier p1
    __p1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'p1'), 'p1',
                                                   '__AbsentNamespace0_CTD_ANON_14_p1', True,
                                                   pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd',
                                                                               229, 14), )
    p1 = property(__p1.value, __p1.set, None,
                  '\n                The tangential distortion coefficient p1\n              ')
    # Element p2 uses Python identifier p2
    __p2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'p2'), 'p2',
                                                   '__AbsentNamespace0_CTD_ANON_14_p2', True,
                                                   pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd',
                                                                               238, 14), )
    p2 = property(__p2.value, __p2.set, None,
                  '\n                The tangential distortion coefficient p2\n              ')
    # Element center uses Python identifier center
    __center = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'center'), 'center',
                                                       '__AbsentNamespace0_CTD_ANON_14_center', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/camera.xsd', 247, 14), )
    center = property(__center.value, __center.set, None,
                      '\n                The distortion center or principal point\n              ')
    _ElementMap.update({
        __k1.name(): __k1,
        __k2.name(): __k2,
        __k3.name(): __k3,
        __p1.name(): __p1,
        __p2.name(): __p2,
        __center.name(): __center
    })
    # No XML attributes are declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_14 = CTD_ANON_14
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type backing SDFormat's <collision> element (collision.xsd):
# optional children laser_retro, max_contacts, pose, geometry, surface, plus a
# required string attribute 'name'.
class CTD_ANON_15(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 12, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element laser_retro uses Python identifier laser_retro
    __laser_retro = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'laser_retro'),
                                                            'laser_retro', '__AbsentNamespace0_CTD_ANON_15_laser_retro',
                                                            True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/collision.xsd', 15, 8), )
    laser_retro = property(__laser_retro.value, __laser_retro.set, None,
                           '\n intensity value returned by laser sensor.\n ')
    # Element max_contacts uses Python identifier max_contacts
    __max_contacts = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max_contacts'),
                                                             'max_contacts',
                                                             '__AbsentNamespace0_CTD_ANON_15_max_contacts', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/collision.xsd', 24, 8), )
    max_contacts = property(__max_contacts.value, __max_contacts.set, None,
                            '\n Maximum number of contacts allowed between two entities. This value overrides the max_contacts element defined in physics.\n ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
                                                     '__AbsentNamespace0_CTD_ANON_15_pose', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/collision.xsd', 33, 8), )
    pose = property(__pose.value, __pose.set, None,
                    '\n The reference frame of the collision element, relative to the reference frame of the link.\n ')
    # Element geometry uses Python identifier geometry
    # NOTE(review): geometry/surface are declared with the module-level Namespace (imported
    # from other .xsd files), unlike the unqualified (None) local elements above.
    __geometry = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'geometry'), 'geometry',
                                                         '__AbsentNamespace0_CTD_ANON_15_geometry', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/geometry.xsd', 17, 2), )
    geometry = property(__geometry.value, __geometry.set, None, None)
    # Element surface uses Python identifier surface
    __surface = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'surface'), 'surface',
                                                        '__AbsentNamespace0_CTD_ANON_15_surface', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/surface.xsd', 9, 2), )
    surface = property(__surface.value, __surface.set, None, None)
    # Attribute name uses Python identifier name
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
                                               '__AbsentNamespace0_CTD_ANON_15_name', pyxb.binding.datatypes.string,
                                               required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 44, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 44, 6)
    name = property(__name.value, __name.set, None,
                    '\n Unique name for the collision element within the scope of the parent link.\n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __laser_retro.name(): __laser_retro,
        __max_contacts.name(): __max_contacts,
        __pose.name(): __pose,
        __geometry.name(): __geometry,
        __surface.name(): __surface
    })
    _AttributeMap.update({
        __name.name(): __name
    })
_module_typeBindings.CTD_ANON_15 = CTD_ANON_15
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for SDFormat's <contact> sensor (contact.xsd):
# children 'collision' (which collision element acts as the contact sensor)
# and 'topic' (publication topic). No attributes.
class CTD_ANON_16(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/contact.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element collision uses Python identifier collision
    __collision = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'collision'), 'collision',
                                                          '__AbsentNamespace0_CTD_ANON_16_collision', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/contact.xsd', 13, 8), )
    collision = property(__collision.value, __collision.set, None,
                         '\n name of the collision element within a link that acts as the contact sensor.\n ')
    # Element topic uses Python identifier topic
    __topic = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'topic'), 'topic',
                                                      '__AbsentNamespace0_CTD_ANON_16_topic', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/contact.xsd', 22, 8), )
    topic = property(__topic.value, __topic.set, None,
                     '\n Topic on which contact data is published.\n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __collision.name(): __collision,
        __topic.name(): __topic
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_16 = CTD_ANON_16
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the cylinder shape (cylinder_shape.xsd):
# children 'radius' and 'length'. No attributes.
class CTD_ANON_17(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/cylinder_shape.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element radius uses Python identifier radius
    __radius = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'radius'), 'radius',
                                                       '__AbsentNamespace0_CTD_ANON_17_radius', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/cylinder_shape.xsd', 13, 8), )
    radius = property(__radius.value, __radius.set, None, '\n Radius of the cylinder\n ')
    # Element length uses Python identifier length
    __length = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'length'), 'length',
                                                       '__AbsentNamespace0_CTD_ANON_17_length', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/cylinder_shape.xsd', 22, 8), )
    length = property(__length.value, __length.set, None, '\n Length of the cylinder\n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __radius.name(): __radius,
        __length.name(): __length
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_17 = CTD_ANON_17
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the force/torque sensor (forcetorque.xsd):
# children 'frame' (reporting frame) and 'measure_direction' (wrench sign
# convention). No attributes.
class CTD_ANON_18(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/forcetorque.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element frame uses Python identifier frame
    __frame = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'frame'), 'frame',
                                                      '__AbsentNamespace0_CTD_ANON_18_frame', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/forcetorque.xsd', 13, 8), )
    frame = property(__frame.value, __frame.set, None,
                     '\n \n Frame in which to report the wrench values. Currently supported frames are:\n "parent" report the wrench expressed in the orientation of the parent link frame,\n "child" report the wrench expressed in the orientation of the child link frame,\n "sensor" report the wrench expressed in the orientation of the joint sensor frame.\n Note that for each option the point with respect to which the \n torque component of the wrench is expressed is the joint origin.\n \n ')
    # Element measure_direction uses Python identifier measure_direction
    __measure_direction = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'measure_direction'), 'measure_direction',
        '__AbsentNamespace0_CTD_ANON_18_measure_direction', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/forcetorque.xsd', 29, 8), )
    measure_direction = property(__measure_direction.value, __measure_direction.set, None,
                                 '\n \n Direction of the wrench measured by the sensor. The supported options are:\n "parent_to_child" if the measured wrench is the one applied by parent link on the child link,\n "child_to_parent" if the measured wrench is the one applied by the child link on the parent link.\n \n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __frame.name(): __frame,
        __measure_direction.name(): __measure_direction
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_18 = CTD_ANON_18
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the <geometry> element (geometry.xsd): one binding
# field per supported shape — box, cylinder, empty, heightmap, image, mesh,
# plane, polyline, sphere. All but 'empty' are qualified with the module-level
# Namespace because each shape is defined in its own *_shape.xsd.
class CTD_ANON_19(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/geometry.xsd', 18, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element box uses Python identifier box
    __box = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'box'), 'box',
                                                    '__AbsentNamespace0_CTD_ANON_19_box', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/box_shape.xsd', 9, 2), )
    box = property(__box.value, __box.set, None, None)
    # Element cylinder uses Python identifier cylinder
    __cylinder = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'cylinder'), 'cylinder',
                                                         '__AbsentNamespace0_CTD_ANON_19_cylinder', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/cylinder_shape.xsd', 9, 2), )
    cylinder = property(__cylinder.value, __cylinder.set, None, None)
    # Element empty uses Python identifier empty
    __empty = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'empty'), 'empty',
                                                      '__AbsentNamespace0_CTD_ANON_19_empty', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/geometry.xsd', 21, 8), )
    empty = property(__empty.value, __empty.set, None,
                     '\n You can use the empty tag to make empty geometries.\n ')
    # Element heightmap uses Python identifier heightmap
    __heightmap = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heightmap'),
                                                          'heightmap', '__AbsentNamespace0_CTD_ANON_19_heightmap', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/heightmap_shape.xsd', 9,
                                                              2), )
    heightmap = property(__heightmap.value, __heightmap.set, None, None)
    # Element image uses Python identifier image
    __image = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'image'), 'image',
                                                      '__AbsentNamespace0_CTD_ANON_19_image', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/image_shape.xsd', 9, 2), )
    image = property(__image.value, __image.set, None, None)
    # Element mesh uses Python identifier mesh
    __mesh = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'mesh'), 'mesh',
                                                     '__AbsentNamespace0_CTD_ANON_19_mesh', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/mesh_shape.xsd', 9, 2), )
    mesh = property(__mesh.value, __mesh.set, None, None)
    # Element plane uses Python identifier plane
    __plane = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'plane'), 'plane',
                                                      '__AbsentNamespace0_CTD_ANON_19_plane', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/plane_shape.xsd', 9, 2), )
    plane = property(__plane.value, __plane.set, None, None)
    # Element polyline uses Python identifier polyline
    __polyline = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'polyline'), 'polyline',
                                                         '__AbsentNamespace0_CTD_ANON_19_polyline', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/polyline_shape.xsd', 9, 2), )
    polyline = property(__polyline.value, __polyline.set, None, None)
    # Element sphere uses Python identifier sphere
    __sphere = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sphere'), 'sphere',
                                                       '__AbsentNamespace0_CTD_ANON_19_sphere', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/sphere_shape.xsd', 9, 2), )
    sphere = property(__sphere.value, __sphere.set, None, None)
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __box.name(): __box,
        __cylinder.name(): __cylinder,
        __empty.name(): __empty,
        __heightmap.name(): __heightmap,
        __image.name(): __image,
        __mesh.name(): __mesh,
        __plane.name(): __plane,
        __polyline.name(): __polyline,
        __sphere.name(): __sphere
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_19 = CTD_ANON_19
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the geometry <empty> tag: a content-less complex type with
# no child elements and no attributes (geometry.xsd).
class CTD_ANON_20(pyxb.binding.basis.complexTypeDefinition):
    """Anonymous empty complex type: the <empty> geometry element declares no children."""
    # Base type is pyxb.binding.datatypes.anyType; nothing is added to it.
    _Abstract = False
    _ExpandedName = None
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/geometry.xsd', 27, 10)
    # Empty content model: no element or attribute declarations to register.
    _ElementMap = {}
    _AttributeMap = {}
    _ElementMap.update({})
    _AttributeMap.update({})
_module_typeBindings.CTD_ANON_20 = CTD_ANON_20
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the GPS sensor (gps.xsd): children
# 'position_sensing' and 'velocity_sensing'. No attributes.
class CTD_ANON_21(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element position_sensing uses Python identifier position_sensing
    __position_sensing = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'position_sensing'),
                                                                 'position_sensing',
                                                                 '__AbsentNamespace0_CTD_ANON_21_position_sensing',
                                                                 True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/gps.xsd', 13, 8), )
    position_sensing = property(__position_sensing.value, __position_sensing.set, None,
                                '\n \n Parameters related to GPS position measurement.\n \n ')
    # Element velocity_sensing uses Python identifier velocity_sensing
    # NOTE(review): the property doc below says "position" for velocity_sensing too;
    # this text is copied verbatim from the schema annotation — fix it in the XSD, not here.
    __velocity_sensing = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'velocity_sensing'),
                                                                 'velocity_sensing',
                                                                 '__AbsentNamespace0_CTD_ANON_21_velocity_sensing',
                                                                 True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/gps.xsd', 58, 8), )
    velocity_sensing = property(__velocity_sensing.value, __velocity_sensing.set, None,
                                '\n \n Parameters related to GPS position measurement.\n \n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __position_sensing.name(): __position_sensing,
        __velocity_sensing.name(): __velocity_sensing
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_21 = CTD_ANON_21
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for GPS <position_sensing> (gps.xsd): children
# 'horizontal' and 'vertical' noise parameter blocks, in meters.
class CTD_ANON_22(pyxb.binding.basis.complexTypeDefinition):
    """
            Parameters related to GPS position measurement.
          """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 21, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element horizontal uses Python identifier horizontal
    __horizontal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'horizontal'),
                                                           'horizontal', '__AbsentNamespace0_CTD_ANON_22_horizontal',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/gps.xsd', 24, 14), )
    horizontal = property(__horizontal.value, __horizontal.set, None,
                          '\n \n Noise parameters for horizontal position measurement, in units of meters.\n \n ')
    # Element vertical uses Python identifier vertical
    __vertical = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'vertical'), 'vertical',
                                                         '__AbsentNamespace0_CTD_ANON_22_vertical', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/gps.xsd', 39, 14), )
    vertical = property(__vertical.value, __vertical.set, None,
                        '\n \n Noise parameters for vertical position measurement, in units of meters.\n \n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __horizontal.name(): __horizontal,
        __vertical.name(): __vertical
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_22 = CTD_ANON_22
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the GPS horizontal-position noise block (gps.xsd): a
# content-less complex type with no child elements and no attributes.
class CTD_ANON_23(pyxb.binding.basis.complexTypeDefinition):
    """Noise parameters for horizontal position measurement, in units of meters (no declared children)."""
    # Base type is pyxb.binding.datatypes.anyType; nothing is added to it.
    _Abstract = False
    _ExpandedName = None
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 32, 16)
    # Empty content model: no element or attribute declarations to register.
    _ElementMap = {}
    _AttributeMap = {}
    _ElementMap.update({})
    _AttributeMap.update({})
_module_typeBindings.CTD_ANON_23 = CTD_ANON_23
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the GPS vertical-position noise block (gps.xsd): a
# content-less complex type with no child elements and no attributes.
class CTD_ANON_24(pyxb.binding.basis.complexTypeDefinition):
    """Noise parameters for vertical position measurement, in units of meters (no declared children)."""
    # Base type is pyxb.binding.datatypes.anyType; nothing is added to it.
    _Abstract = False
    _ExpandedName = None
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 47, 16)
    # Empty content model: no element or attribute declarations to register.
    _ElementMap = {}
    _AttributeMap = {}
    _ElementMap.update({})
    _AttributeMap.update({})
_module_typeBindings.CTD_ANON_24 = CTD_ANON_24
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for GPS <velocity_sensing> (gps.xsd): children
# 'horizontal' and 'vertical' noise parameter blocks, in meters/second.
# (The class docstring below says "position"; it is copied verbatim from the
# schema annotation — fix it in the XSD, not here.)
class CTD_ANON_25(pyxb.binding.basis.complexTypeDefinition):
    """
            Parameters related to GPS position measurement.
          """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 66, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element horizontal uses Python identifier horizontal
    __horizontal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'horizontal'),
                                                           'horizontal', '__AbsentNamespace0_CTD_ANON_25_horizontal',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/gps.xsd', 69, 14), )
    horizontal = property(__horizontal.value, __horizontal.set, None,
                          '\n \n Noise parameters for horizontal velocity measurement, in units of meters/second.\n \n ')
    # Element vertical uses Python identifier vertical
    __vertical = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'vertical'), 'vertical',
                                                         '__AbsentNamespace0_CTD_ANON_25_vertical', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/gps.xsd', 84, 14), )
    vertical = property(__vertical.value, __vertical.set, None,
                        '\n \n Noise parameters for vertical velocity measurement, in units of meters/second.\n \n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __horizontal.name(): __horizontal,
        __vertical.name(): __vertical
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_25 = CTD_ANON_25
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the GPS horizontal-velocity noise block (gps.xsd): a
# content-less complex type with no child elements and no attributes.
class CTD_ANON_26(pyxb.binding.basis.complexTypeDefinition):
    """Noise parameters for horizontal velocity measurement, in units of meters/second (no declared children)."""
    # Base type is pyxb.binding.datatypes.anyType; nothing is added to it.
    _Abstract = False
    _ExpandedName = None
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 77, 16)
    # Empty content model: no element or attribute declarations to register.
    _ElementMap = {}
    _AttributeMap = {}
    _ElementMap.update({})
    _AttributeMap.update({})
_module_typeBindings.CTD_ANON_26 = CTD_ANON_26
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the GPS vertical-velocity noise block (gps.xsd): a
# content-less complex type with no child elements and no attributes.
class CTD_ANON_27(pyxb.binding.basis.complexTypeDefinition):
    """Noise parameters for vertical velocity measurement, in units of meters/second (no declared children)."""
    # Base type is pyxb.binding.datatypes.anyType; nothing is added to it.
    _Abstract = False
    _ExpandedName = None
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 92, 16)
    # Empty content model: no element or attribute declarations to register.
    _ElementMap = {}
    _AttributeMap = {}
    _ElementMap.update({})
    _AttributeMap.update({})
_module_typeBindings.CTD_ANON_27 = CTD_ANON_27
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the <gripper> element (gripper.xsd): children
# grasp_check, gripper_link, palm_link, plus a required string attribute 'name'.
class CTD_ANON_28(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 5, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element grasp_check uses Python identifier grasp_check
    __grasp_check = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'grasp_check'),
                                                            'grasp_check', '__AbsentNamespace0_CTD_ANON_28_grasp_check',
                                                            True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/gripper.xsd', 8, 8), )
    grasp_check = property(__grasp_check.value, __grasp_check.set, None, None)
    # Element gripper_link uses Python identifier gripper_link
    __gripper_link = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'gripper_link'),
                                                             'gripper_link',
                                                             '__AbsentNamespace0_CTD_ANON_28_gripper_link', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/gripper.xsd', 28, 8), )
    gripper_link = property(__gripper_link.value, __gripper_link.set, None, None)
    # Element palm_link uses Python identifier palm_link
    __palm_link = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'palm_link'), 'palm_link',
                                                          '__AbsentNamespace0_CTD_ANON_28_palm_link', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/gripper.xsd', 32, 8), )
    palm_link = property(__palm_link.value, __palm_link.set, None, None)
    # Attribute name uses Python identifier name
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
                                               '__AbsentNamespace0_CTD_ANON_28_name', pyxb.binding.datatypes.string,
                                               required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 36, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 36, 6)
    name = property(__name.value, __name.set, None, None)
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __grasp_check.name(): __grasp_check,
        __gripper_link.name(): __gripper_link,
        __palm_link.name(): __palm_link
    })
    _AttributeMap.update({
        __name.name(): __name
    })
_module_typeBindings.CTD_ANON_28 = CTD_ANON_28
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the gripper's <grasp_check> element (gripper.xsd):
# children detach_steps, attach_steps, min_contact_count. No attributes.
class CTD_ANON_29(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 9, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element detach_steps uses Python identifier detach_steps
    __detach_steps = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'detach_steps'),
                                                             'detach_steps',
                                                             '__AbsentNamespace0_CTD_ANON_29_detach_steps', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/gripper.xsd', 12, 14), )
    detach_steps = property(__detach_steps.value, __detach_steps.set, None, None)
    # Element attach_steps uses Python identifier attach_steps
    __attach_steps = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'attach_steps'),
                                                             'attach_steps',
                                                             '__AbsentNamespace0_CTD_ANON_29_attach_steps', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/gripper.xsd', 16, 14), )
    attach_steps = property(__attach_steps.value, __attach_steps.set, None, None)
    # Element min_contact_count uses Python identifier min_contact_count
    __min_contact_count = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'min_contact_count'), 'min_contact_count',
        '__AbsentNamespace0_CTD_ANON_29_min_contact_count', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 20, 14), )
    min_contact_count = property(__min_contact_count.value, __min_contact_count.set, None, None)
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __detach_steps.name(): __detach_steps,
        __attach_steps.name(): __attach_steps,
        __min_contact_count.name(): __min_contact_count
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_29 = CTD_ANON_29
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for the heightmap shape (heightmap_shape.xsd):
# children uri, size, pos, texture, blend, use_terrain_paging. No attributes.
class CTD_ANON_30(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'uri'), 'uri',
                                                    '__AbsentNamespace0_CTD_ANON_30_uri', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/heightmap_shape.xsd', 13, 8), )
    uri = property(__uri.value, __uri.set, None, '\n URI to a grayscale image file\n ')
    # Element size uses Python identifier size
    __size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'size'), 'size',
                                                     '__AbsentNamespace0_CTD_ANON_30_size', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/heightmap_shape.xsd', 22, 8), )
    size = property(__size.value, __size.set, None,
                    '\n The size of the heightmap in world units.\n When loading an image: "size" is used if present, otherwise defaults to 1x1x1.\n When loading a DEM: "size" is used if present, otherwise defaults to true size of DEM.\n \n ')
    # Element pos uses Python identifier pos
    __pos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pos'), 'pos',
                                                    '__AbsentNamespace0_CTD_ANON_30_pos', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/heightmap_shape.xsd', 34, 8), )
    pos = property(__pos.value, __pos.set, None, '\n A position offset.\n ')
    # Element texture uses Python identifier texture
    __texture = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'texture'), 'texture',
                                                        '__AbsentNamespace0_CTD_ANON_30_texture', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/heightmap_shape.xsd', 43, 8), )
    texture = property(__texture.value, __texture.set, None,
                       '\n The heightmap can contain multiple textures. The order of the texture matters. The first texture will appear at the lowest height, and the last texture at the highest height. Use blend to control the height thresholds and fade between textures.\n ')
    # Element blend uses Python identifier blend
    __blend = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'blend'), 'blend',
                                                      '__AbsentNamespace0_CTD_ANON_30_blend', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/heightmap_shape.xsd', 83, 8), )
    blend = property(__blend.value, __blend.set, None,
                     '\n The blend tag controls how two adjacent textures are mixed. The number of blend elements should equal one less than the number of textures.\n ')
    # Element use_terrain_paging uses Python identifier use_terrain_paging
    __use_terrain_paging = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'use_terrain_paging'), 'use_terrain_paging',
        '__AbsentNamespace0_CTD_ANON_30_use_terrain_paging', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd', 114, 8), )
    use_terrain_paging = property(__use_terrain_paging.value, __use_terrain_paging.set, None,
                                  '\n Set if the rendering engine will use terrain paging\n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __uri.name(): __uri,
        __size.name(): __size,
        __pos.name(): __pos,
        __texture.name(): __texture,
        __blend.name(): __blend,
        __use_terrain_paging.name(): __use_terrain_paging
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_30 = CTD_ANON_30
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for a heightmap <texture> layer (heightmap_shape.xsd):
# children size, diffuse, normal. No attributes.
class CTD_ANON_31(pyxb.binding.basis.complexTypeDefinition):
    """
            The heightmap can contain multiple textures. The order of the texture matters. The first texture will appear at the lowest height, and the last texture at the highest height. Use blend to control the height thresholds and fade between textures.
          """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd', 49, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element size uses Python identifier size
    __size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'size'), 'size',
                                                     '__AbsentNamespace0_CTD_ANON_31_size', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/heightmap_shape.xsd', 52, 14), )
    size = property(__size.value, __size.set, None,
                    '\n Size of the applied texture in meters.\n ')
    # Element diffuse uses Python identifier diffuse
    __diffuse = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'diffuse'), 'diffuse',
                                                        '__AbsentNamespace0_CTD_ANON_31_diffuse', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/heightmap_shape.xsd', 61,
                                                            14), )
    diffuse = property(__diffuse.value, __diffuse.set, None,
                       '\n Diffuse texture image filename\n ')
    # Element normal uses Python identifier normal
    __normal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'normal'), 'normal',
                                                       '__AbsentNamespace0_CTD_ANON_31_normal', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/heightmap_shape.xsd', 70, 14), )
    normal = property(__normal.value, __normal.set, None,
                      '\n Normalmap texture image filename\n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __size.name(): __size,
        __diffuse.name(): __diffuse,
        __normal.name(): __normal
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_31 = CTD_ANON_31
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding — regenerate from the XSD rather than hand-editing logic.
# Anonymous complex type for a heightmap <blend> entry (heightmap_shape.xsd):
# children min_height, fade_dist. No attributes.
class CTD_ANON_32(pyxb.binding.basis.complexTypeDefinition):
    """
            The blend tag controls how two adjacent textures are mixed. The number of blend elements should equal one less than the number of textures.
          """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd', 89, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element min_height uses Python identifier min_height
    __min_height = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min_height'),
                                                           'min_height', '__AbsentNamespace0_CTD_ANON_32_min_height',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/heightmap_shape.xsd', 92, 14), )
    min_height = property(__min_height.value, __min_height.set, None,
                          '\n Min height of a blend layer\n ')
    # Element fade_dist uses Python identifier fade_dist
    __fade_dist = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'fade_dist'), 'fade_dist',
                                                          '__AbsentNamespace0_CTD_ANON_32_fade_dist', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/heightmap_shape.xsd', 101,
                                                              14), )
    fade_dist = property(__fade_dist.value, __fade_dist.set, None,
                         '\n Distance over which the blend occurs\n ')
    # Register the declarations with PyXB's content model for this type.
    _ElementMap.update({
        __min_height.name(): __min_height,
        __fade_dist.name(): __fade_dist
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_32 = CTD_ANON_32
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_33(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # NOTE(review): PyXB-generated binding for the anonymous image-shape type
    # in image_shape.xsd — regenerate from the schema instead of hand-editing.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'uri'), 'uri',
        '__AbsentNamespace0_CTD_ANON_33_uri', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/image_shape.xsd', 13, 8), )
    uri = property(__uri.value, __uri.set, None, '\n          URI of the grayscale image file\n        ')
    # Element scale uses Python identifier scale
    __scale = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'scale'), 'scale',
        '__AbsentNamespace0_CTD_ANON_33_scale', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/image_shape.xsd', 22, 8), )
    scale = property(__scale.value, __scale.set, None,
        '\n          Scaling factor applied to the image\n        ')
    # Element threshold uses Python identifier threshold
    __threshold = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'threshold'), 'threshold',
        '__AbsentNamespace0_CTD_ANON_33_threshold', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/image_shape.xsd', 31, 8), )
    threshold = property(__threshold.value, __threshold.set, None, '\n          Grayscale threshold\n        ')
    # Element height uses Python identifier height
    __height = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'height'), 'height',
        '__AbsentNamespace0_CTD_ANON_33_height', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/image_shape.xsd', 40, 8), )
    height = property(__height.value, __height.set, None, '\n          Height of the extruded boxes\n        ')
    # Element granularity uses Python identifier granularity
    __granularity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'granularity'),
        'granularity', '__AbsentNamespace0_CTD_ANON_33_granularity',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/image_shape.xsd', 49, 8), )
    granularity = property(__granularity.value, __granularity.set, None,
        '\n          The amount of error in the model\n        ')
    _ElementMap.update({
        __uri.name(): __uri,
        __scale.name(): __scale,
        __threshold.name(): __threshold,
        __height.name(): __height,
        __granularity.name(): __granularity
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_33 = CTD_ANON_33
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_34(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # NOTE(review): PyXB-generated binding for the top-level IMU sensor type in
    # imu.xsd — regenerate from the schema instead of hand-editing.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element topic uses Python identifier topic
    __topic = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'topic'), 'topic',
        '__AbsentNamespace0_CTD_ANON_34_topic', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            13, 8), )
    topic = property(__topic.value, __topic.set, None,
        '\n          Topic on which data is published.\n        ')
    # Element angular_velocity uses Python identifier angular_velocity
    __angular_velocity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'angular_velocity'),
        'angular_velocity',
        '__AbsentNamespace0_CTD_ANON_34_angular_velocity',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 22, 8), )
    angular_velocity = property(__angular_velocity.value, __angular_velocity.set, None,
        '\n          These elements are specific to body-frame angular velocity,\n          which is expressed in radians per second\n        ')
    # Element linear_acceleration uses Python identifier linear_acceleration
    __linear_acceleration = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'linear_acceleration'), 'linear_acceleration',
        '__AbsentNamespace0_CTD_ANON_34_linear_acceleration', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 75, 8), )
    linear_acceleration = property(__linear_acceleration.value, __linear_acceleration.set, None,
        '\n          These elements are specific to body-frame linear acceleration,\n          which is expressed in meters per second squared\n        ')
    # Element noise uses Python identifier noise
    __noise = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'noise'), 'noise',
        '__AbsentNamespace0_CTD_ANON_34_noise', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            128, 8), )
    noise = property(__noise.value, __noise.set, None,
        '\n          The properties of the noise model that should be applied to generated data\n        ')
    _ElementMap.update({
        __topic.name(): __topic,
        __angular_velocity.name(): __angular_velocity,
        __linear_acceleration.name(): __linear_acceleration,
        __noise.name(): __noise
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_34 = CTD_ANON_34
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_35(pyxb.binding.basis.complexTypeDefinition):
    """
    These elements are specific to body-frame angular velocity,
    which is expressed in radians per second
    """
    # NOTE(review): PyXB-generated binding for the <angular_velocity> child of
    # the IMU type in imu.xsd — regenerate from the schema, do not hand-edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 29, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element x uses Python identifier x
    __x = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'x'), 'x',
        '__AbsentNamespace0_CTD_ANON_35_x', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 32,
            14), )
    x = property(__x.value, __x.set, None,
        '\n              Angular velocity about the X axis\n            ')
    # Element y uses Python identifier y
    __y = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'y'), 'y',
        '__AbsentNamespace0_CTD_ANON_35_y', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 45,
            14), )
    y = property(__y.value, __y.set, None,
        '\n              Angular velocity about the Y axis\n            ')
    # Element z uses Python identifier z
    __z = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'z'), 'z',
        '__AbsentNamespace0_CTD_ANON_35_z', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 58,
            14), )
    z = property(__z.value, __z.set, None,
        '\n              Angular velocity about the Z axis\n            ')
    _ElementMap.update({
        __x.name(): __x,
        __y.name(): __y,
        __z.name(): __z
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_35 = CTD_ANON_35
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_36(pyxb.binding.basis.complexTypeDefinition):
    """
    Angular velocity about the X axis
    """
    # NOTE(review): PyXB-generated binding; the schema declares no child
    # elements or attributes for this type, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 38, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_36 = CTD_ANON_36
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_37(pyxb.binding.basis.complexTypeDefinition):
    """
    Angular velocity about the Y axis
    """
    # NOTE(review): PyXB-generated binding; the schema declares no child
    # elements or attributes for this type, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 51, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_37 = CTD_ANON_37
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_38(pyxb.binding.basis.complexTypeDefinition):
    """
    Angular velocity about the Z axis
    """
    # NOTE(review): PyXB-generated binding; the schema declares no child
    # elements or attributes for this type, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 64, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_38 = CTD_ANON_38
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_39(pyxb.binding.basis.complexTypeDefinition):
    """
    These elements are specific to body-frame linear acceleration,
    which is expressed in meters per second squared
    """
    # NOTE(review): PyXB-generated binding for the <linear_acceleration> child
    # of the IMU type in imu.xsd — regenerate from the schema, do not hand-edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 82, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element x uses Python identifier x
    __x = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'x'), 'x',
        '__AbsentNamespace0_CTD_ANON_39_x', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 85,
            14), )
    x = property(__x.value, __x.set, None,
        '\n              Linear acceleration about the X axis\n            ')
    # Element y uses Python identifier y
    __y = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'y'), 'y',
        '__AbsentNamespace0_CTD_ANON_39_y', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 98,
            14), )
    y = property(__y.value, __y.set, None,
        '\n              Linear acceleration about the Y axis\n            ')
    # Element z uses Python identifier z
    __z = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'z'), 'z',
        '__AbsentNamespace0_CTD_ANON_39_z', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            111, 14), )
    z = property(__z.value, __z.set, None,
        '\n              Linear acceleration about the Z axis\n            ')
    _ElementMap.update({
        __x.name(): __x,
        __y.name(): __y,
        __z.name(): __z
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_39 = CTD_ANON_39
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_40(pyxb.binding.basis.complexTypeDefinition):
    """
    Linear acceleration about the X axis
    """
    # NOTE(review): PyXB-generated binding; the schema declares no child
    # elements or attributes for this type, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 91, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_40 = CTD_ANON_40
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_41(pyxb.binding.basis.complexTypeDefinition):
    """
    Linear acceleration about the Y axis
    """
    # NOTE(review): PyXB-generated binding; the schema declares no child
    # elements or attributes for this type, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 104, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_41 = CTD_ANON_41
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_42(pyxb.binding.basis.complexTypeDefinition):
    """
    Linear acceleration about the Z axis
    """
    # NOTE(review): PyXB-generated binding; the schema declares no child
    # elements or attributes for this type, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 117, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_42 = CTD_ANON_42
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_43(pyxb.binding.basis.complexTypeDefinition):
    """
    The properties of the noise model that should be applied to generated data
    """
    # NOTE(review): PyXB-generated binding for the IMU <noise> element in
    # imu.xsd — regenerate from the schema instead of hand-editing.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 134, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element type uses Python identifier type
    __type = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'type'), 'type',
        '__AbsentNamespace0_CTD_ANON_43_type', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            137, 14), )
    # The generated property name "type" intentionally shadows the builtin
    # within this class body; PyXB mirrors the XSD element name.
    type = property(__type.value, __type.set, None,
        '\n              The type of noise. Currently supported types are: "gaussian" (draw noise values independently for each beam from a Gaussian distribution).\n            ')
    # Element rate uses Python identifier rate
    __rate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'rate'), 'rate',
        '__AbsentNamespace0_CTD_ANON_43_rate', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            146, 14), )
    rate = property(__rate.value, __rate.set, None,
        '\n              Noise parameters for angular rates.\n            ')
    # Element accel uses Python identifier accel
    __accel = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'accel'), 'accel',
        '__AbsentNamespace0_CTD_ANON_43_accel', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            195, 14), )
    accel = property(__accel.value, __accel.set, None,
        '\n              Noise parameters for linear accelerations.\n            ')
    _ElementMap.update({
        __type.name(): __type,
        __rate.name(): __rate,
        __accel.name(): __accel
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_43 = CTD_ANON_43
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_44(pyxb.binding.basis.complexTypeDefinition):
    """
    Noise parameters for angular rates.
    """
    # NOTE(review): PyXB-generated binding for the <rate> noise parameters in
    # imu.xsd — regenerate from the schema instead of hand-editing.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 152, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element mean uses Python identifier mean
    __mean = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mean'), 'mean',
        '__AbsentNamespace0_CTD_ANON_44_mean', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            155, 20), )
    mean = property(__mean.value, __mean.set, None,
        '\n                  For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n                ')
    # Element stddev uses Python identifier stddev
    __stddev = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stddev'), 'stddev',
        '__AbsentNamespace0_CTD_ANON_44_stddev', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 164, 20), )
    stddev = property(__stddev.value, __stddev.set, None,
        '\n                  For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n                ')
    # Element bias_mean uses Python identifier bias_mean
    __bias_mean = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bias_mean'), 'bias_mean',
        '__AbsentNamespace0_CTD_ANON_44_bias_mean', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 173, 20), )
    bias_mean = property(__bias_mean.value, __bias_mean.set, None,
        '\n                  For type "gaussian," the mean of the Gaussian distribution from which bias values are drawn.\n                ')
    # Element bias_stddev uses Python identifier bias_stddev
    __bias_stddev = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bias_stddev'),
        'bias_stddev', '__AbsentNamespace0_CTD_ANON_44_bias_stddev',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 182, 20), )
    bias_stddev = property(__bias_stddev.value, __bias_stddev.set, None,
        '\n                  For type "gaussian," the standard deviation of the Gaussian distribution from which bias values are drawn.\n                ')
    _ElementMap.update({
        __mean.name(): __mean,
        __stddev.name(): __stddev,
        __bias_mean.name(): __bias_mean,
        __bias_stddev.name(): __bias_stddev
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_44 = CTD_ANON_44
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_45(pyxb.binding.basis.complexTypeDefinition):
    """
    Noise parameters for linear accelerations.
    """
    # NOTE(review): PyXB-generated binding for the <accel> noise parameters in
    # imu.xsd; structurally parallel to CTD_ANON_44 (the <rate> binding).
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 201, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element mean uses Python identifier mean
    __mean = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mean'), 'mean',
        '__AbsentNamespace0_CTD_ANON_45_mean', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
            204, 20), )
    mean = property(__mean.value, __mean.set, None,
        '\n                  For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n                ')
    # Element stddev uses Python identifier stddev
    __stddev = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stddev'), 'stddev',
        '__AbsentNamespace0_CTD_ANON_45_stddev', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 213, 20), )
    stddev = property(__stddev.value, __stddev.set, None,
        '\n                  For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n                ')
    # Element bias_mean uses Python identifier bias_mean
    __bias_mean = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bias_mean'), 'bias_mean',
        '__AbsentNamespace0_CTD_ANON_45_bias_mean', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 222, 20), )
    bias_mean = property(__bias_mean.value, __bias_mean.set, None,
        '\n                  For type "gaussian," the mean of the Gaussian distribution from which bias values are drawn.\n                ')
    # Element bias_stddev uses Python identifier bias_stddev
    __bias_stddev = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bias_stddev'),
        'bias_stddev', '__AbsentNamespace0_CTD_ANON_45_bias_stddev',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/imu.xsd', 231, 20), )
    bias_stddev = property(__bias_stddev.value, __bias_stddev.set, None,
        '\n                  For type "gaussian," the standard deviation of the Gaussian distribution from which bias values are drawn.\n                ')
    _ElementMap.update({
        __mean.name(): __mean,
        __stddev.name(): __stddev,
        __bias_mean.name(): __bias_mean,
        __bias_stddev.name(): __bias_stddev
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_45 = CTD_ANON_45
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_46(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # NOTE(review): PyXB-generated binding for the <inertial> type in
    # inertial.xsd — regenerate from the schema instead of hand-editing.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element mass uses Python identifier mass
    __mass = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mass'), 'mass',
        '__AbsentNamespace0_CTD_ANON_46_mass', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 13, 8), )
    mass = property(__mass.value, __mass.set, None, '\n          The mass of the link.\n        ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
        '__AbsentNamespace0_CTD_ANON_46_pose', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 22, 8), )
    pose = property(__pose.value, __pose.set, None,
        '\n          This is the pose of the inertial reference frame, relative to the link reference frame. The origin of the inertial reference frame needs to be at the center of gravity. The axes of the inertial reference frame do not need to be aligned with the principal axes of the inertia.\n        ')
    # Element inertia uses Python identifier inertia
    __inertia = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'inertia'), 'inertia',
        '__AbsentNamespace0_CTD_ANON_46_inertia', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 31, 8), )
    inertia = property(__inertia.value, __inertia.set, None,
        '\n          The 3x3 rotational inertia matrix. Because the rotational inertia matrix is symmetric, only 6 above-diagonal elements of this matrix are specified here, using the attributes ixx, ixy, ixz, iyy, iyz, izz.\n        ')
    _ElementMap.update({
        __mass.name(): __mass,
        __pose.name(): __pose,
        __inertia.name(): __inertia
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_46 = CTD_ANON_46
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_47(pyxb.binding.basis.complexTypeDefinition):
    """
    The 3x3 rotational inertia matrix. Because the rotational inertia matrix is symmetric, only 6 above-diagonal elements of this matrix are specified here, using the attributes ixx, ixy, ixz, iyy, iyz, izz.
    """
    # NOTE(review): PyXB-generated binding for the <inertia> element in
    # inertial.xsd; the six properties are the upper-triangle matrix entries.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 37, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element ixx uses Python identifier ixx
    __ixx = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ixx'), 'ixx',
        '__AbsentNamespace0_CTD_ANON_47_ixx', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 40, 14), )
    ixx = property(__ixx.value, __ixx.set, None, None)
    # Element ixy uses Python identifier ixy
    __ixy = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ixy'), 'ixy',
        '__AbsentNamespace0_CTD_ANON_47_ixy', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 44, 14), )
    ixy = property(__ixy.value, __ixy.set, None, None)
    # Element ixz uses Python identifier ixz
    __ixz = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ixz'), 'ixz',
        '__AbsentNamespace0_CTD_ANON_47_ixz', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 48, 14), )
    ixz = property(__ixz.value, __ixz.set, None, None)
    # Element iyy uses Python identifier iyy
    __iyy = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'iyy'), 'iyy',
        '__AbsentNamespace0_CTD_ANON_47_iyy', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 52, 14), )
    iyy = property(__iyy.value, __iyy.set, None, None)
    # Element iyz uses Python identifier iyz
    __iyz = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'iyz'), 'iyz',
        '__AbsentNamespace0_CTD_ANON_47_iyz', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 56, 14), )
    iyz = property(__iyz.value, __iyz.set, None, None)
    # Element izz uses Python identifier izz
    __izz = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'izz'), 'izz',
        '__AbsentNamespace0_CTD_ANON_47_izz', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 60, 14), )
    izz = property(__izz.value, __izz.set, None, None)
    _ElementMap.update({
        __ixx.name(): __ixx,
        __ixy.name(): __ixy,
        __ixz.name(): __ixz,
        __iyy.name(): __iyy,
        __iyz.name(): __iyz,
        __izz.name(): __izz
    })
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_47 = CTD_ANON_47
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_48(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # NOTE(review): PyXB-generated binding for the <joint> type in joint.xsd —
    # regenerate from the schema instead of hand-editing. Unlike the other
    # child elements (absent namespace), <sensor> is declared with the
    # module-level Namespace object, i.e. it comes from sensor.xsd.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 11, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element parent uses Python identifier parent
    __parent = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'parent'), 'parent',
        '__AbsentNamespace0_CTD_ANON_48_parent', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 14, 8), )
    parent = property(__parent.value, __parent.set, None, '\n          Name of the parent link\n        ')
    # Element child uses Python identifier child
    __child = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'child'), 'child',
        '__AbsentNamespace0_CTD_ANON_48_child', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 23, 8), )
    child = property(__child.value, __child.set, None, '\n          Name of the child link\n        ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
        '__AbsentNamespace0_CTD_ANON_48_pose', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 32, 8), )
    pose = property(__pose.value, __pose.set, None,
        '\n          Pose offset from child link frame to joint frame (expressed in child link frame).\n        ')
    # Element gearbox_ratio uses Python identifier gearbox_ratio
    __gearbox_ratio = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'gearbox_ratio'),
        'gearbox_ratio',
        '__AbsentNamespace0_CTD_ANON_48_gearbox_ratio', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 41, 8), )
    gearbox_ratio = property(__gearbox_ratio.value, __gearbox_ratio.set, None,
        '\n          Parameter for gearbox joints. Given theta_1 and theta_2 defined in description for gearbox_reference_body, theta_2 = -gearbox_ratio * theta_1.\n        ')
    # Element gearbox_reference_body uses Python identifier gearbox_reference_body
    __gearbox_reference_body = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'gearbox_reference_body'), 'gearbox_reference_body',
        '__AbsentNamespace0_CTD_ANON_48_gearbox_reference_body', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 50, 8), )
    gearbox_reference_body = property(__gearbox_reference_body.value, __gearbox_reference_body.set, None,
        '\n          Parameter for gearbox joints. Gearbox ratio is enforced over two joint angles. First joint angle (theta_1) is the angle from the gearbox_reference_body to the parent link in the direction of the axis element and the second joint angle (theta_2) is the angle from the gearbox_reference_body to the child link in the direction of the axis2 element.\n        ')
    # Element thread_pitch uses Python identifier thread_pitch
    __thread_pitch = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'thread_pitch'),
        'thread_pitch',
        '__AbsentNamespace0_CTD_ANON_48_thread_pitch', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 59, 8), )
    thread_pitch = property(__thread_pitch.value, __thread_pitch.set, None,
        '\n          Parameter for screw joints.\n        ')
    # Element axis uses Python identifier axis
    __axis = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'axis'), 'axis',
        '__AbsentNamespace0_CTD_ANON_48_axis', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 68, 8), )
    axis = property(__axis.value, __axis.set, None,
        '\n        \n          Parameters related to the axis of rotation for revolute joints,\n          the axis of translation for prismatic joints.\n        \n        ')
    # Element axis2 uses Python identifier axis2
    __axis2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'axis2'), 'axis2',
        '__AbsentNamespace0_CTD_ANON_48_axis2', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 226, 8), )
    axis2 = property(__axis2.value, __axis2.set, None,
        '\n        \n          Parameters related to the second axis of rotation for revolute2 joints and universal joints.\n        \n        ')
    # Element physics uses Python identifier physics
    __physics = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'physics'), 'physics',
        '__AbsentNamespace0_CTD_ANON_48_physics', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/joint.xsd', 378, 8), )
    physics = property(__physics.value, __physics.set, None,
        '\n          Parameters that are specific to a certain physics engine.\n        ')
    # Element sensor uses Python identifier sensor
    __sensor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sensor'), 'sensor',
        '__AbsentNamespace0_CTD_ANON_48_sensor', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/sensor.xsd', 23, 2), )
    sensor = property(__sensor.value, __sensor.set, None, None)
    # Attribute name uses Python identifier name
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_48_name', pyxb.binding.datatypes.string,
        required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 569, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 569, 6)
    name = property(__name.value, __name.set, None,
        '\n        A unique name for the joint within the scope of the model.\n      ')
    # Attribute type uses Python identifier type
    __type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type',
        '__AbsentNamespace0_CTD_ANON_48_type', pyxb.binding.datatypes.string,
        required=True)
    __type._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 576, 6)
    __type._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 576, 6)
    type = property(__type.value, __type.set, None,
        '\n        The type of joint, which must be one of the following:\n          (revolute) a hinge joint that rotates on a single axis with either a fixed or continuous range of motion,\n          (gearbox) geared revolute joints,\n          (revolute2) same as two revolute joints connected in series,\n          (prismatic) a sliding joint that slides along an axis with a limited range specified by upper and lower limits,\n          (ball) a ball and socket joint,\n          (screw) a single degree of freedom joint with coupled sliding and rotational motion,\n          (universal) like a ball joint, but constrains one degree of freedom,\n          (fixed) a joint with zero degrees of freedom that rigidly connects two links.\n        \n      ')
    _ElementMap.update({
        __parent.name(): __parent,
        __child.name(): __child,
        __pose.name(): __pose,
        __gearbox_ratio.name(): __gearbox_ratio,
        __gearbox_reference_body.name(): __gearbox_reference_body,
        __thread_pitch.name(): __thread_pitch,
        __axis.name(): __axis,
        __axis2.name(): __axis2,
        __physics.name(): __physics,
        __sensor.name(): __sensor
    })
    _AttributeMap.update({
        __name.name(): __name,
        __type.name(): __type
    })
_module_typeBindings.CTD_ANON_48 = CTD_ANON_48
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) describing a joint's axis; regenerate from the
# schema instead of hand-editing.
class CTD_ANON_49(pyxb.binding.basis.complexTypeDefinition):
    """
    Parameters related to the axis of rotation for revolute joints,
    the axis of translation for prismatic joints.
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 77, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # The double-underscore declarations below are Python name-mangled; the
    # matching mangled id is passed explicitly as the third argument so PyXB
    # can locate the declaration on instances of this class.
    # Element xyz uses Python identifier xyz
    __xyz = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'xyz'), 'xyz',
                                                    '__AbsentNamespace0_CTD_ANON_49_xyz', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                80, 14), )
    xyz = property(__xyz.value, __xyz.set, None,
                   '\n \n Represents the x,y,z components of the axis unit vector. The axis is\n expressed in the joint frame unless the use_parent_model_frame\n flag is set to true. The vector should be normalized.\n \n ')
    # Element use_parent_model_frame uses Python identifier use_parent_model_frame
    __use_parent_model_frame = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'use_parent_model_frame'), 'use_parent_model_frame',
        '__AbsentNamespace0_CTD_ANON_49_use_parent_model_frame', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 93, 14), )
    use_parent_model_frame = property(__use_parent_model_frame.value, __use_parent_model_frame.set, None,
                                      '\n \n Flag to interpret the axis xyz element in the parent model frame instead\n of joint frame. Provided for Gazebo compatibility\n (see https://bitbucket.org/osrf/gazebo/issue/494 ).\n \n ')
    # Element dynamics uses Python identifier dynamics
    __dynamics = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'dynamics'), 'dynamics',
                                                         '__AbsentNamespace0_CTD_ANON_49_dynamics', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 106, 14), )
    dynamics = property(__dynamics.value, __dynamics.set, None,
                        '\n An element specifying physical properties of the joint. These values are used to specify modeling properties of the joint, particularly useful for simulation.\n ')
    # Element limit uses Python identifier limit
    __limit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'limit'), 'limit',
                                                      '__AbsentNamespace0_CTD_ANON_49_limit', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 155, 14), )
    limit = property(__limit.value, __limit.set, None,
                     '\n specifies the limits of this joint\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __xyz.name(): __xyz,
        __use_parent_model_frame.name(): __use_parent_model_frame,
        __dynamics.name(): __dynamics,
        __limit.name(): __limit
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_49 = CTD_ANON_49
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) — the <dynamics> child of the first joint axis;
# regenerate from the schema instead of hand-editing.
class CTD_ANON_50(pyxb.binding.basis.complexTypeDefinition):
    """
    An element specifying physical properties of the joint. These values are used to specify modeling properties of the joint, particularly useful for simulation.
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 112, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element damping uses Python identifier damping
    __damping = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'damping'), 'damping',
                                                        '__AbsentNamespace0_CTD_ANON_50_damping', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/joint.xsd', 115, 20), )
    damping = property(__damping.value, __damping.set, None,
                       '\n The physical velocity dependent viscous damping coefficient of the joint.\n ')
    # Element friction uses Python identifier friction
    __friction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'friction'), 'friction',
                                                         '__AbsentNamespace0_CTD_ANON_50_friction', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 124, 20), )
    friction = property(__friction.value, __friction.set, None,
                        '\n The physical static friction value of the joint.\n ')
    # Element spring_reference uses Python identifier spring_reference
    __spring_reference = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'spring_reference'),
                                                                 'spring_reference',
                                                                 '__AbsentNamespace0_CTD_ANON_50_spring_reference',
                                                                 True, pyxb.utils.utility.Location(
                                                                     'http://sdformat.org/schemas/joint.xsd', 133, 20), )
    spring_reference = property(__spring_reference.value, __spring_reference.set, None,
                                '\n The spring reference position for this joint axis.\n ')
    # Element spring_stiffness uses Python identifier spring_stiffness
    __spring_stiffness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'spring_stiffness'),
                                                                 'spring_stiffness',
                                                                 '__AbsentNamespace0_CTD_ANON_50_spring_stiffness',
                                                                 True, pyxb.utils.utility.Location(
                                                                     'http://sdformat.org/schemas/joint.xsd', 142, 20), )
    spring_stiffness = property(__spring_stiffness.value, __spring_stiffness.set, None,
                                '\n The spring stiffness for this joint axis.\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __damping.name(): __damping,
        __friction.name(): __friction,
        __spring_reference.name(): __spring_reference,
        __spring_stiffness.name(): __spring_stiffness
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_50 = CTD_ANON_50
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) — the <limit> child of the first joint axis;
# regenerate from the schema instead of hand-editing.
class CTD_ANON_51(pyxb.binding.basis.complexTypeDefinition):
    """
    specifies the limits of this joint
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 161, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element lower uses Python identifier lower
    __lower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'lower'), 'lower',
                                                      '__AbsentNamespace0_CTD_ANON_51_lower', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 164, 20), )
    lower = property(__lower.value, __lower.set, None,
                     '\n An attribute specifying the lower joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ')
    # Element upper uses Python identifier upper
    __upper = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'upper'), 'upper',
                                                      '__AbsentNamespace0_CTD_ANON_51_upper', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 173, 20), )
    upper = property(__upper.value, __upper.set, None,
                     '\n An attribute specifying the upper joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ')
    # Element effort uses Python identifier effort
    __effort = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'effort'), 'effort',
                                                       '__AbsentNamespace0_CTD_ANON_51_effort', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/joint.xsd', 182, 20), )
    effort = property(__effort.value, __effort.set, None,
                      '\n An attribute for enforcing the maximum joint effort applied by Joint::SetForce. Limit is not enforced if value is negative.\n ')
    # Element velocity uses Python identifier velocity
    __velocity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'velocity'), 'velocity',
                                                         '__AbsentNamespace0_CTD_ANON_51_velocity', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 191, 20), )
    velocity = property(__velocity.value, __velocity.set, None,
                        '\n (not implemented) An attribute for enforcing the maximum joint velocity.\n ')
    # Element stiffness uses Python identifier stiffness
    __stiffness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stiffness'), 'stiffness',
                                                          '__AbsentNamespace0_CTD_ANON_51_stiffness', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/joint.xsd', 200, 20), )
    stiffness = property(__stiffness.value, __stiffness.set, None,
                         '\n Joint stop stiffness. Support physics engines: SimBody.\n ')
    # Element dissipation uses Python identifier dissipation
    __dissipation = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'dissipation'),
                                                            'dissipation', '__AbsentNamespace0_CTD_ANON_51_dissipation',
                                                            True, pyxb.utils.utility.Location(
                                                                'http://sdformat.org/schemas/joint.xsd', 209, 20), )
    dissipation = property(__dissipation.value, __dissipation.set, None,
                           '\n Joint stop dissipation.\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __lower.name(): __lower,
        __upper.name(): __upper,
        __effort.name(): __effort,
        __velocity.name(): __velocity,
        __stiffness.name(): __stiffness,
        __dissipation.name(): __dissipation
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_51 = CTD_ANON_51
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) describing a joint's second axis; regenerate
# from the schema instead of hand-editing.
class CTD_ANON_52(pyxb.binding.basis.complexTypeDefinition):
    """
    Parameters related to the second axis of rotation for revolute2 joints and universal joints.
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 234, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element xyz uses Python identifier xyz
    __xyz = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'xyz'), 'xyz',
                                                    '__AbsentNamespace0_CTD_ANON_52_xyz', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                237, 14), )
    xyz = property(__xyz.value, __xyz.set, None,
                   '\n \n Represents the x,y,z components of the axis unit vector. The axis is\n expressed in the joint frame unless the use_parent_model_frame\n flag is set to true. The vector should be normalized.\n \n ')
    # Element use_parent_model_frame uses Python identifier use_parent_model_frame
    __use_parent_model_frame = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'use_parent_model_frame'), 'use_parent_model_frame',
        '__AbsentNamespace0_CTD_ANON_52_use_parent_model_frame', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 250, 14), )
    use_parent_model_frame = property(__use_parent_model_frame.value, __use_parent_model_frame.set, None,
                                      '\n \n Flag to interpret the axis xyz element in the parent model frame instead\n of joint frame. Provided for Gazebo compatibility\n (see https://bitbucket.org/osrf/gazebo/issue/494 ).\n \n ')
    # Element dynamics uses Python identifier dynamics
    __dynamics = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'dynamics'), 'dynamics',
                                                         '__AbsentNamespace0_CTD_ANON_52_dynamics', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 263, 14), )
    dynamics = property(__dynamics.value, __dynamics.set, None,
                        '\n An element specifying physical properties of the joint. These values are used to specify modeling properties of the joint, particularly useful for simulation.\n ')
    # Element limit uses Python identifier limit
    # (the schema provides no documentation string for this element)
    __limit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'limit'), 'limit',
                                                      '__AbsentNamespace0_CTD_ANON_52_limit', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 312, 14), )
    limit = property(__limit.value, __limit.set, None, None)
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __xyz.name(): __xyz,
        __use_parent_model_frame.name(): __use_parent_model_frame,
        __dynamics.name(): __dynamics,
        __limit.name(): __limit
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_52 = CTD_ANON_52
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) — the <dynamics> child of the second joint
# axis; regenerate from the schema instead of hand-editing.
class CTD_ANON_53(pyxb.binding.basis.complexTypeDefinition):
    """
    An element specifying physical properties of the joint. These values are used to specify modeling properties of the joint, particularly useful for simulation.
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 269, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element damping uses Python identifier damping
    __damping = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'damping'), 'damping',
                                                        '__AbsentNamespace0_CTD_ANON_53_damping', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/joint.xsd', 272, 20), )
    damping = property(__damping.value, __damping.set, None,
                       '\n The physical velocity dependent viscous damping coefficient of the joint. EXPERIMENTAL: if damping coefficient is negative and implicit_spring_damper is true, adaptive damping is used.\n ')
    # Element friction uses Python identifier friction
    __friction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'friction'), 'friction',
                                                         '__AbsentNamespace0_CTD_ANON_53_friction', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 281, 20), )
    friction = property(__friction.value, __friction.set, None,
                        '\n The physical static friction value of the joint.\n ')
    # Element spring_reference uses Python identifier spring_reference
    __spring_reference = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'spring_reference'),
                                                                 'spring_reference',
                                                                 '__AbsentNamespace0_CTD_ANON_53_spring_reference',
                                                                 True, pyxb.utils.utility.Location(
                                                                     'http://sdformat.org/schemas/joint.xsd', 290, 20), )
    spring_reference = property(__spring_reference.value, __spring_reference.set, None,
                                '\n The spring reference position for this joint axis.\n ')
    # Element spring_stiffness uses Python identifier spring_stiffness
    __spring_stiffness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'spring_stiffness'),
                                                                 'spring_stiffness',
                                                                 '__AbsentNamespace0_CTD_ANON_53_spring_stiffness',
                                                                 True, pyxb.utils.utility.Location(
                                                                     'http://sdformat.org/schemas/joint.xsd', 299, 20), )
    spring_stiffness = property(__spring_stiffness.value, __spring_stiffness.set, None,
                                '\n The spring stiffness for this joint axis.\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __damping.name(): __damping,
        __friction.name(): __friction,
        __spring_reference.name(): __spring_reference,
        __spring_stiffness.name(): __spring_stiffness
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_53 = CTD_ANON_53
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) — presumably the <limit> child of the second
# joint axis (the schema supplies no docstring; inferred from the element set
# mirroring CTD_ANON_51 — TODO confirm against joint.xsd line 313).
class CTD_ANON_54(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 313, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element lower uses Python identifier lower
    __lower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'lower'), 'lower',
                                                      '__AbsentNamespace0_CTD_ANON_54_lower', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 316, 20), )
    lower = property(__lower.value, __lower.set, None,
                     '\n An attribute specifying the lower joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ')
    # Element upper uses Python identifier upper
    __upper = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'upper'), 'upper',
                                                      '__AbsentNamespace0_CTD_ANON_54_upper', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 325, 20), )
    upper = property(__upper.value, __upper.set, None,
                     '\n An attribute specifying the upper joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ')
    # Element effort uses Python identifier effort
    __effort = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'effort'), 'effort',
                                                       '__AbsentNamespace0_CTD_ANON_54_effort', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/joint.xsd', 334, 20), )
    effort = property(__effort.value, __effort.set, None,
                      '\n An attribute for enforcing the maximum joint effort applied by Joint::SetForce. Limit is not enforced if value is negative.\n ')
    # Element velocity uses Python identifier velocity
    __velocity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'velocity'), 'velocity',
                                                         '__AbsentNamespace0_CTD_ANON_54_velocity', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 343, 20), )
    velocity = property(__velocity.value, __velocity.set, None,
                        '\n (not implemented) An attribute for enforcing the maximum joint velocity.\n ')
    # Element stiffness uses Python identifier stiffness
    __stiffness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stiffness'), 'stiffness',
                                                          '__AbsentNamespace0_CTD_ANON_54_stiffness', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/joint.xsd', 352, 20), )
    stiffness = property(__stiffness.value, __stiffness.set, None,
                         '\n Joint stop stiffness. Supported physics engines: SimBody.\n ')
    # Element dissipation uses Python identifier dissipation
    __dissipation = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'dissipation'),
                                                            'dissipation', '__AbsentNamespace0_CTD_ANON_54_dissipation',
                                                            True, pyxb.utils.utility.Location(
                                                                'http://sdformat.org/schemas/joint.xsd', 361, 20), )
    dissipation = property(__dissipation.value, __dissipation.set, None,
                           '\n Joint stop dissipation. Supported physics engines: SimBody.\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __lower.name(): __lower,
        __upper.name(): __upper,
        __effort.name(): __effort,
        __velocity.name(): __velocity,
        __stiffness.name(): __stiffness,
        __dissipation.name(): __dissipation
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_54 = CTD_ANON_54
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) holding per-physics-engine joint parameters;
# regenerate from the schema instead of hand-editing.
class CTD_ANON_55(pyxb.binding.basis.complexTypeDefinition):
    """
    Parameters that are specific to a certain physics engine.
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 384, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element simbody uses Python identifier simbody
    __simbody = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'simbody'), 'simbody',
                                                        '__AbsentNamespace0_CTD_ANON_55_simbody', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/joint.xsd', 387, 14), )
    simbody = property(__simbody.value, __simbody.set, None,
                       '\n Simbody specific parameters\n ')
    # Element ode uses Python identifier ode
    __ode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ode'), 'ode',
                                                    '__AbsentNamespace0_CTD_ANON_55_ode', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                409, 14), )
    ode = property(__ode.value, __ode.set, None, '\n ODE specific parameters\n ')
    # Element provide_feedback uses Python identifier provide_feedback
    __provide_feedback = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'provide_feedback'),
                                                                 'provide_feedback',
                                                                 '__AbsentNamespace0_CTD_ANON_55_provide_feedback',
                                                                 True, pyxb.utils.utility.Location(
                                                                     'http://sdformat.org/schemas/joint.xsd', 555, 14), )
    provide_feedback = property(__provide_feedback.value, __provide_feedback.set, None,
                                '\n If provide feedback is set to true, physics engine will compute the constraint forces at this joint. For now, provide_feedback under ode block will override this tag and given user warning about the migration. provide_feedback under ode is scheduled to be removed in SDF 1.5.\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __simbody.name(): __simbody,
        __ode.name(): __ode,
        __provide_feedback.name(): __provide_feedback
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_55 = CTD_ANON_55
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) holding Simbody-specific joint parameters;
# regenerate from the schema instead of hand-editing.
class CTD_ANON_56(pyxb.binding.basis.complexTypeDefinition):
    """
    Simbody specific parameters
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 393, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element must_be_loop_joint uses Python identifier must_be_loop_joint
    __must_be_loop_joint = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'must_be_loop_joint'), 'must_be_loop_joint',
        '__AbsentNamespace0_CTD_ANON_56_must_be_loop_joint', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 396, 20), )
    must_be_loop_joint = property(__must_be_loop_joint.value, __must_be_loop_joint.set, None,
                                  '\n Force cut in the multibody graph at this joint.\n ')
    # Register the element declaration with PyXB's content model.
    _ElementMap.update({
        __must_be_loop_joint.name(): __must_be_loop_joint
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_56 = CTD_ANON_56
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) holding ODE-specific joint parameters;
# regenerate from the schema instead of hand-editing.
class CTD_ANON_57(pyxb.binding.basis.complexTypeDefinition):
    """
    ODE specific parameters
    """
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 415, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element provide_feedback uses Python identifier provide_feedback
    __provide_feedback = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'provide_feedback'),
                                                                 'provide_feedback',
                                                                 '__AbsentNamespace0_CTD_ANON_57_provide_feedback',
                                                                 True, pyxb.utils.utility.Location(
                                                                     'http://sdformat.org/schemas/joint.xsd', 418, 20), )
    provide_feedback = property(__provide_feedback.value, __provide_feedback.set, None,
                                '\n (DEPRECATION WARNING: In SDF 1.5 this tag will be replaced by the same tag directly under the physics-block. For now, this tag overrides the one outside of ode-block, but in SDF 1.5 this tag will be removed completely.) If provide feedback is set to true, ODE will compute the constraint forces at this joint.\n ')
    # Element cfm_damping uses Python identifier cfm_damping
    __cfm_damping = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'cfm_damping'),
                                                            'cfm_damping', '__AbsentNamespace0_CTD_ANON_57_cfm_damping',
                                                            True, pyxb.utils.utility.Location(
                                                                'http://sdformat.org/schemas/joint.xsd', 427, 20), )
    cfm_damping = property(__cfm_damping.value, __cfm_damping.set, None,
                           '\n If cfm damping is set to true, ODE will use CFM to simulate damping, allows for infinite damping, and one additional constraint row (previously used for joint limit) is always active.\n ')
    # Element implicit_spring_damper uses Python identifier implicit_spring_damper
    __implicit_spring_damper = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'implicit_spring_damper'), 'implicit_spring_damper',
        '__AbsentNamespace0_CTD_ANON_57_implicit_spring_damper', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 436, 20), )
    implicit_spring_damper = property(__implicit_spring_damper.value, __implicit_spring_damper.set, None,
                                      '\n If implicit_spring_damper is set to true, ODE will use CFM, ERP to simulate stiffness and damping, allows for infinite damping, and one additional constraint row (previously used for joint limit) is always active. This replaces cfm_damping parameter in sdf 1.4.\n ')
    # Element fudge_factor uses Python identifier fudge_factor
    __fudge_factor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'fudge_factor'),
                                                             'fudge_factor',
                                                             '__AbsentNamespace0_CTD_ANON_57_fudge_factor', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/joint.xsd', 445, 20), )
    fudge_factor = property(__fudge_factor.value, __fudge_factor.set, None,
                            '\n Scale the excess for in a joint motor at joint limits. Should be between zero and one.\n ')
    # Element cfm uses Python identifier cfm
    __cfm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'cfm'), 'cfm',
                                                    '__AbsentNamespace0_CTD_ANON_57_cfm', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                454, 20), )
    cfm = property(__cfm.value, __cfm.set, None,
                   '\n Constraint force mixing for constrained directions\n ')
    # Element erp uses Python identifier erp
    __erp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'erp'), 'erp',
                                                    '__AbsentNamespace0_CTD_ANON_57_erp', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                463, 20), )
    erp = property(__erp.value, __erp.set, None,
                   '\n Error reduction parameter for constrained directions\n ')
    # Element bounce uses Python identifier bounce
    __bounce = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bounce'), 'bounce',
                                                       '__AbsentNamespace0_CTD_ANON_57_bounce', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/joint.xsd', 472, 20), )
    bounce = property(__bounce.value, __bounce.set, None,
                      '\n Bounciness of the limits\n ')
    # Element max_force uses Python identifier max_force
    __max_force = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max_force'), 'max_force',
                                                          '__AbsentNamespace0_CTD_ANON_57_max_force', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/joint.xsd', 481, 20), )
    max_force = property(__max_force.value, __max_force.set, None,
                         '\n Maximum force or torque used to reach the desired velocity.\n ')
    # Element velocity uses Python identifier velocity
    __velocity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'velocity'), 'velocity',
                                                         '__AbsentNamespace0_CTD_ANON_57_velocity', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/joint.xsd', 490, 20), )
    velocity = property(__velocity.value, __velocity.set, None,
                        '\n The desired velocity of the joint. Should only be set if you want the joint to move on load.\n ')
    # Element limit uses Python identifier limit
    # (the schema provides no documentation string for this element)
    __limit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'limit'), 'limit',
                                                      '__AbsentNamespace0_CTD_ANON_57_limit', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/joint.xsd', 499, 20), )
    limit = property(__limit.value, __limit.set, None, None)
    # Element suspension uses Python identifier suspension
    # (the schema provides no documentation string for this element)
    __suspension = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'suspension'),
                                                           'suspension', '__AbsentNamespace0_CTD_ANON_57_suspension',
                                                           True, pyxb.utils.utility.Location(
                                                               'http://sdformat.org/schemas/joint.xsd', 525, 20), )
    suspension = property(__suspension.value, __suspension.set, None, None)
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __provide_feedback.name(): __provide_feedback,
        __cfm_damping.name(): __cfm_damping,
        __implicit_spring_damper.name(): __implicit_spring_damper,
        __fudge_factor.name(): __fudge_factor,
        __cfm.name(): __cfm,
        __erp.name(): __erp,
        __bounce.name(): __bounce,
        __max_force.name(): __max_force,
        __velocity.name(): __velocity,
        __limit.name(): __limit,
        __suspension.name(): __suspension
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_57 = CTD_ANON_57
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) — presumably the ODE joint-stop <limit> block
# (the schema supplies no docstring; inferred from the cfm/erp "joint stop"
# element docs — TODO confirm against joint.xsd line 500).
class CTD_ANON_58(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 500, 22)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element cfm uses Python identifier cfm
    __cfm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'cfm'), 'cfm',
                                                    '__AbsentNamespace0_CTD_ANON_58_cfm', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                503, 26), )
    cfm = property(__cfm.value, __cfm.set, None,
                   '\n Constraint force mixing parameter used by the joint stop\n ')
    # Element erp uses Python identifier erp
    __erp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'erp'), 'erp',
                                                    '__AbsentNamespace0_CTD_ANON_58_erp', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                512, 26), )
    erp = property(__erp.value, __erp.set, None,
                   '\n Error reduction parameter used by the joint stop\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __cfm.name(): __cfm,
        __erp.name(): __erp
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_58 = CTD_ANON_58
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for an anonymous complex type in
# joint.xsd (see _XSDLocation) — presumably the ODE <suspension> block (the
# schema supplies no docstring; inferred from the cfm/erp "suspension"
# element docs — TODO confirm against joint.xsd line 526).
class CTD_ANON_59(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB type metadata: anonymous (no expanded name), element-only content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 526, 22)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element cfm uses Python identifier cfm
    __cfm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'cfm'), 'cfm',
                                                    '__AbsentNamespace0_CTD_ANON_59_cfm', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                529, 26), )
    cfm = property(__cfm.value, __cfm.set, None,
                   '\n Suspension constraint force mixing parameter\n ')
    # Element erp uses Python identifier erp
    __erp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'erp'), 'erp',
                                                    '__AbsentNamespace0_CTD_ANON_59_erp', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd',
                                                                                538, 26), )
    erp = property(__erp.value, __erp.set, None,
                   '\n Suspension error reduction parameter\n ')
    # Register the element declarations with PyXB's content model.
    _ElementMap.update({
        __cfm.name(): __cfm,
        __erp.name(): __erp
    })
    _AttributeMap.update({
    })
# Expose the binding through the module-level type registry.
_module_typeBindings.CTD_ANON_59 = CTD_ANON_59
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at link.xsd:17 (see _XSDLocation);
# appears machine-generated — prefer regenerating from the XSD over hand edits.
# Mixes elements from several schemas (audio, collision, inertial, sensor, visual, ...)
# with link.xsd-local elements, plus a required 'name' attribute.
class CTD_ANON_60(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 17, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element audio_sink uses Python identifier audio_sink
    __audio_sink = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'audio_sink'),
        'audio_sink', '__AbsentNamespace0_CTD_ANON_60_audio_sink',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/audio_sink.xsd', 9, 2), )
    audio_sink = property(__audio_sink.value, __audio_sink.set, None, None)
    # Element audio_source uses Python identifier audio_source
    __audio_source = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'audio_source'),
        'audio_source',
        '__AbsentNamespace0_CTD_ANON_60_audio_source', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/audio_source.xsd', 9,
            2), )
    audio_source = property(__audio_source.value, __audio_source.set, None, None)
    # Element collision uses Python identifier collision
    __collision = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'collision'),
        'collision', '__AbsentNamespace0_CTD_ANON_60_collision', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/collision.xsd', 11, 2), )
    collision = property(__collision.value, __collision.set, None, None)
    # Element inertial uses Python identifier inertial
    __inertial = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'inertial'), 'inertial',
        '__AbsentNamespace0_CTD_ANON_60_inertial', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/inertial.xsd', 9, 2), )
    inertial = property(__inertial.value, __inertial.set, None, None)
    # Element gravity uses Python identifier gravity
    __gravity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'gravity'), 'gravity',
        '__AbsentNamespace0_CTD_ANON_60_gravity', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/link.xsd', 20, 8), )
    gravity = property(__gravity.value, __gravity.set, None,
        '\n If true, the link is affected by gravity.\n ')
    # Element self_collide uses Python identifier self_collide
    __self_collide = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'self_collide'),
        'self_collide',
        '__AbsentNamespace0_CTD_ANON_60_self_collide', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/link.xsd', 29, 8), )
    self_collide = property(__self_collide.value, __self_collide.set, None,
        '\n If true, the link can collide with other links in the model. Two links within a model will collide if link1.self_collide OR link2.self_collide. Links connected by a joint will never collide.\n ')
    # Element kinematic uses Python identifier kinematic
    __kinematic = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'kinematic'), 'kinematic',
        '__AbsentNamespace0_CTD_ANON_60_kinematic', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/link.xsd', 38, 8), )
    kinematic = property(__kinematic.value, __kinematic.set, None,
        '\n If true, the link is kinematic only\n ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
        '__AbsentNamespace0_CTD_ANON_60_pose', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd',
            47, 8), )
    pose = property(__pose.value, __pose.set, None,
        '\n This is the pose of the link reference frame, relative to the model reference frame.\n ')
    # Element must_be_base_link uses Python identifier must_be_base_link
    __must_be_base_link = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'must_be_base_link'), 'must_be_base_link',
        '__AbsentNamespace0_CTD_ANON_60_must_be_base_link', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 56, 8), )
    must_be_base_link = property(__must_be_base_link.value, __must_be_base_link.set, None,
        '\n If true, the link will have 6DOF and be a direct child of world.\n ')
    # Element velocity_decay uses Python identifier velocity_decay
    __velocity_decay = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'velocity_decay'),
        'velocity_decay',
        '__AbsentNamespace0_CTD_ANON_60_velocity_decay', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/link.xsd', 65, 8), )
    velocity_decay = property(__velocity_decay.value, __velocity_decay.set, None,
        "\n Exponential damping of the link's velocity.\n ")
    # Element projector uses Python identifier projector
    __projector = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'projector'),
        'projector', '__AbsentNamespace0_CTD_ANON_60_projector', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/projector.xsd', 5, 2), )
    projector = property(__projector.value, __projector.set, None, None)
    # Element sensor uses Python identifier sensor
    __sensor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sensor'), 'sensor',
        '__AbsentNamespace0_CTD_ANON_60_sensor', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/sensor.xsd', 23, 2), )
    sensor = property(__sensor.value, __sensor.set, None, None)
    # Element visual uses Python identifier visual
    __visual = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'visual'), 'visual',
        '__AbsentNamespace0_CTD_ANON_60_visual', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 12, 2), )
    visual = property(__visual.value, __visual.set, None, None)
    # Attribute name uses Python identifier name (required by the schema)
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_60_name', pyxb.binding.datatypes.string,
        required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 103, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 103, 6)
    name = property(__name.value, __name.set, None,
        '\n A unique name for the link within the scope of the model.\n ')
    # Map the XML element/attribute names to the declarations above.
    _ElementMap.update({
        __audio_sink.name(): __audio_sink,
        __audio_source.name(): __audio_source,
        __collision.name(): __collision,
        __inertial.name(): __inertial,
        __gravity.name(): __gravity,
        __self_collide.name(): __self_collide,
        __kinematic.name(): __kinematic,
        __pose.name(): __pose,
        __must_be_base_link.name(): __must_be_base_link,
        __velocity_decay.name(): __velocity_decay,
        __projector.name(): __projector,
        __sensor.name(): __sensor,
        __visual.name(): __visual
    })
    _AttributeMap.update({
        __name.name(): __name
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_60 = CTD_ANON_60
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at link.xsd:71 (see _XSDLocation);
# appears machine-generated — prefer regenerating from the XSD over hand edits.
class CTD_ANON_61(pyxb.binding.basis.complexTypeDefinition):
    """
    Exponential damping of the link's velocity.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 71, 10)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element linear uses Python identifier linear
    __linear = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'linear'), 'linear',
        '__AbsentNamespace0_CTD_ANON_61_linear', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/link.xsd', 74, 14), )
    linear = property(__linear.value, __linear.set, None, '\n Linear damping\n ')
    # Element angular uses Python identifier angular
    __angular = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'angular'), 'angular',
        '__AbsentNamespace0_CTD_ANON_61_angular', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/link.xsd', 83, 14), )
    angular = property(__angular.value, __angular.set, None,
        '\n Angular damping\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __linear.name(): __linear,
        __angular.name(): __angular
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_61 = CTD_ANON_61
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at logical_camera.xsd:10 (see
# _XSDLocation); appears machine-generated — regenerate from the XSD rather
# than hand-editing. Declares the view-frustum elements of a logical camera.
class CTD_ANON_62(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/logical_camera.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element near uses Python identifier near
    __near = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'near'), 'near',
        '__AbsentNamespace0_CTD_ANON_62_near', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/logical_camera.xsd', 13, 8), )
    near = property(__near.value, __near.set, None,
        '\n Near clipping distance of the view frustum\n ')
    # Element far uses Python identifier far
    __far = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'far'), 'far',
        '__AbsentNamespace0_CTD_ANON_62_far', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/logical_camera.xsd', 22, 8), )
    far = property(__far.value, __far.set, None,
        '\n Far clipping distance of the view frustum\n ')
    # Element aspect_ratio uses Python identifier aspect_ratio
    __aspect_ratio = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'aspect_ratio'),
        'aspect_ratio',
        '__AbsentNamespace0_CTD_ANON_62_aspect_ratio', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/logical_camera.xsd', 31,
            8), )
    aspect_ratio = property(__aspect_ratio.value, __aspect_ratio.set, None,
        '\n Aspect ratio of the near and far planes. This is the width divided by the height of the near or far planes.\n ')
    # Element horizontal_fov uses Python identifier horizontal_fov
    __horizontal_fov = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'horizontal_fov'),
        'horizontal_fov',
        '__AbsentNamespace0_CTD_ANON_62_horizontal_fov', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/logical_camera.xsd', 40,
            8), )
    horizontal_fov = property(__horizontal_fov.value, __horizontal_fov.set, None,
        "\n Horizontal field of view of the frustum, in radians. This is the angle between the frustum's vertex and the edges of the near or far plane.\n ")
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __near.name(): __near,
        __far.name(): __far,
        __aspect_ratio.name(): __aspect_ratio,
        __horizontal_fov.name(): __horizontal_fov
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_62 = CTD_ANON_62
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at magnetometer.xsd:10 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_63(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element x uses Python identifier x
    __x = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'x'), 'x',
        '__AbsentNamespace0_CTD_ANON_63_x', True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/magnetometer.xsd', 13, 8), )
    x = property(__x.value, __x.set, None,
        '\n \n Parameters related to the body-frame X axis of the magnetometer\n \n ')
    # Element y uses Python identifier y
    __y = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'y'), 'y',
        '__AbsentNamespace0_CTD_ANON_63_y', True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/magnetometer.xsd', 28, 8), )
    y = property(__y.value, __y.set, None,
        '\n \n Parameters related to the body-frame Y axis of the magnetometer\n \n ')
    # Element z uses Python identifier z
    __z = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'z'), 'z',
        '__AbsentNamespace0_CTD_ANON_63_z', True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/magnetometer.xsd', 43, 8), )
    z = property(__z.value, __z.set, None,
        '\n \n Parameters related to the body-frame Z axis of the magnetometer\n \n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __x.name(): __x,
        __y.name(): __y,
        __z.name(): __z
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_63 = CTD_ANON_63
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at magnetometer.xsd:21 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_64(pyxb.binding.basis.complexTypeDefinition):
    """
    Parameters related to the body-frame X axis of the magnetometer
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 21, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # No local element or attribute declarations for this type.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_64 = CTD_ANON_64
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at magnetometer.xsd:36 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_65(pyxb.binding.basis.complexTypeDefinition):
    """
    Parameters related to the body-frame Y axis of the magnetometer
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 36, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # No local element or attribute declarations for this type.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_65 = CTD_ANON_65
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at magnetometer.xsd:51 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_66(pyxb.binding.basis.complexTypeDefinition):
    """
    Parameters related to the body-frame Z axis of the magnetometer
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 51, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # No local element or attribute declarations for this type.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_66 = CTD_ANON_66
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at material.xsd:10 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
# Declares the material sub-elements (script/shader/lighting and color terms).
class CTD_ANON_67(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element script uses Python identifier script
    __script = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'script'), 'script',
        '__AbsentNamespace0_CTD_ANON_67_script', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 13, 8), )
    script = property(__script.value, __script.set, None,
        '\n Name of material from an installed script file. This will override the color element if the script exists.\n ')
    # Element shader uses Python identifier shader
    __shader = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'shader'), 'shader',
        '__AbsentNamespace0_CTD_ANON_67_shader', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 44, 8), )
    shader = property(__shader.value, __shader.set, None, None)
    # Element lighting uses Python identifier lighting
    __lighting = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'lighting'), 'lighting',
        '__AbsentNamespace0_CTD_ANON_67_lighting', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 68, 8), )
    lighting = property(__lighting.value, __lighting.set, None,
        '\n If false, dynamic lighting will be disabled\n ')
    # Element ambient uses Python identifier ambient
    __ambient = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ambient'), 'ambient',
        '__AbsentNamespace0_CTD_ANON_67_ambient', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 77, 8), )
    ambient = property(__ambient.value, __ambient.set, None,
        '\n The ambient color of a material specified by set of four numbers representing red/green/blue, each in the range of [0,1].\n ')
    # Element diffuse uses Python identifier diffuse
    __diffuse = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'diffuse'), 'diffuse',
        '__AbsentNamespace0_CTD_ANON_67_diffuse', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 86, 8), )
    diffuse = property(__diffuse.value, __diffuse.set, None,
        '\n The diffuse color of a material specified by set of four numbers representing red/green/blue/alpha, each in the range of [0,1].\n ')
    # Element specular uses Python identifier specular
    __specular = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'specular'), 'specular',
        '__AbsentNamespace0_CTD_ANON_67_specular', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 95, 8), )
    specular = property(__specular.value, __specular.set, None,
        '\n The specular color of a material specified by set of four numbers representing red/green/blue/alpha, each in the range of [0,1].\n ')
    # Element emissive uses Python identifier emissive
    __emissive = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'emissive'), 'emissive',
        '__AbsentNamespace0_CTD_ANON_67_emissive', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 104, 8), )
    emissive = property(__emissive.value, __emissive.set, None,
        '\n The emissive color of a material specified by set of four numbers representing red/green/blue, each in the range of [0,1].\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __script.name(): __script,
        __shader.name(): __shader,
        __lighting.name(): __lighting,
        __ambient.name(): __ambient,
        __diffuse.name(): __diffuse,
        __specular.name(): __specular,
        __emissive.name(): __emissive
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_67 = CTD_ANON_67
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at material.xsd:19 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_68(pyxb.binding.basis.complexTypeDefinition):
    """
    Name of material from an installed script file. This will override the color element if the script exists.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 19, 10)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'uri'), 'uri',
        '__AbsentNamespace0_CTD_ANON_68_uri', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 22, 14), )
    uri = property(__uri.value, __uri.set, None,
        '\n URI of the material script file\n ')
    # Element name uses Python identifier name
    __name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_68_name', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 31, 14), )
    name = property(__name.value, __name.set, None,
        '\n Name of the script within the script file\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __uri.name(): __uri,
        __name.name(): __name
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_68 = CTD_ANON_68
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at material.xsd:45 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
# Declares a 'normal_map' element plus a required 'type' attribute.
class CTD_ANON_69(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 45, 10)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element normal_map uses Python identifier normal_map
    __normal_map = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'normal_map'),
        'normal_map', '__AbsentNamespace0_CTD_ANON_69_normal_map',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 48, 14), )
    normal_map = property(__normal_map.value, __normal_map.set, None,
        '\n filename of the normal map\n ')
    # Attribute type uses Python identifier type (required by the schema)
    __type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type',
        '__AbsentNamespace0_CTD_ANON_69_type', pyxb.binding.datatypes.string,
        required=True)
    __type._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 57, 12)
    __type._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 57, 12)
    type = property(__type.value, __type.set, None,
        '\n vertex, pixel, normal_map_objectspace, normal_map_tangentspace\n ')
    # Map the XML element/attribute names to the declarations above.
    _ElementMap.update({
        __normal_map.name(): __normal_map
    })
    _AttributeMap.update({
        __type.name(): __type
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_69 = CTD_ANON_69
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at mesh_shape.xsd:10 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_70(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'uri'), 'uri',
        '__AbsentNamespace0_CTD_ANON_70_uri', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/mesh_shape.xsd', 13, 8), )
    uri = property(__uri.value, __uri.set, None, '\n Mesh uri\n ')
    # Element submesh uses Python identifier submesh
    __submesh = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'submesh'), 'submesh',
        '__AbsentNamespace0_CTD_ANON_70_submesh', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/mesh_shape.xsd', 22, 8), )
    submesh = property(__submesh.value, __submesh.set, None,
        '\n Use a named submesh. The submesh must exist in the mesh specified by the uri\n ')
    # Element scale uses Python identifier scale
    __scale = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'scale'), 'scale',
        '__AbsentNamespace0_CTD_ANON_70_scale', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/mesh_shape.xsd', 53, 8), )
    scale = property(__scale.value, __scale.set, None,
        '\n Scaling factor applied to the mesh\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __uri.name(): __uri,
        __submesh.name(): __submesh,
        __scale.name(): __scale
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_70 = CTD_ANON_70
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at mesh_shape.xsd:28 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_71(pyxb.binding.basis.complexTypeDefinition):
    """
    Use a named submesh. The submesh must exist in the mesh specified by the uri
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 28, 10)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element name uses Python identifier name
    __name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_71_name', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/mesh_shape.xsd', 31, 14), )
    name = property(__name.value, __name.set, None,
        '\n Name of the submesh within the parent mesh\n ')
    # Element center uses Python identifier center
    __center = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'center'), 'center',
        '__AbsentNamespace0_CTD_ANON_71_center', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/mesh_shape.xsd', 40, 14), )
    center = property(__center.value, __center.set, None,
        '\n Set to true to center the vertices of the submesh at 0,0,0. This will effectively remove any transformations on the submesh before the poses from parent links and models are applied.\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __name.name(): __name,
        __center.name(): __center
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_71 = CTD_ANON_71
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at plane_shape.xsd:10 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_72(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/plane_shape.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element normal uses Python identifier normal
    __normal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'normal'), 'normal',
        '__AbsentNamespace0_CTD_ANON_72_normal', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/plane_shape.xsd', 13, 8), )
    normal = property(__normal.value, __normal.set, None,
        '\n Normal direction for the plane\n ')
    # Element size uses Python identifier size
    __size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'size'), 'size',
        '__AbsentNamespace0_CTD_ANON_72_size', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/plane_shape.xsd', 22, 8), )
    size = property(__size.value, __size.set, None, '\n Length of each side of the plane\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __normal.name(): __normal,
        __size.name(): __size
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_72 = CTD_ANON_72
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at plugin.xsd:10 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
# Has no declared child elements but allows wildcard content
# (_HasWildcardElement) plus two required attributes: 'name' and 'filename'.
class CTD_ANON_73(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute name uses Python identifier name (required by the schema)
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_73_name', pyxb.binding.datatypes.string,
        required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 14, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 14, 6)
    name = property(__name.value, __name.set, None,
        '\n A unique name for the plugin, scoped to its parent.\n ')
    # Attribute filename uses Python identifier filename (required by the schema)
    __filename = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'filename'), 'filename',
        '__AbsentNamespace0_CTD_ANON_73_filename',
        pyxb.binding.datatypes.string, required=True)
    __filename._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 21, 6)
    __filename._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 21, 6)
    filename = property(__filename.value, __filename.set, None,
        '\n Name of the shared library to load. If the filename is not a full path name, the file will be searched for in the configuration paths.\n ')
    # Wildcard content: arbitrary child elements are accepted.
    _HasWildcardElement = True
    _ElementMap.update({
    })
    _AttributeMap.update({
        __name.name(): __name,
        __filename.name(): __filename
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_73 = CTD_ANON_73
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at polyline_shape.xsd:10 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
class CTD_ANON_74(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/polyline_shape.xsd', 10, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element point uses Python identifier point
    __point = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'point'), 'point',
        '__AbsentNamespace0_CTD_ANON_74_point', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/polyline_shape.xsd', 13, 8), )
    point = property(__point.value, __point.set, None,
        '\n \n A series of points that define the path of the polyline.\n \n ')
    # Element height uses Python identifier height
    __height = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'height'), 'height',
        '__AbsentNamespace0_CTD_ANON_74_height', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/polyline_shape.xsd', 24, 8), )
    height = property(__height.value, __height.set, None, '\n Height of the polyline\n ')
    # Map the XML element names to the declarations above.
    _ElementMap.update({
        __point.name(): __point,
        __height.name(): __height
    })
    _AttributeMap.update({
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_74 = CTD_ANON_74
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB binding for the anonymous complex type at projector.xsd:6 (see
# _XSDLocation); appears machine-generated — prefer regenerating from the XSD.
# Declares the projector sub-elements and a required 'name' attribute.
class CTD_ANON_75(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 6, 4)
    # Populated below via .update() once the declarations exist.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element plugin uses Python identifier plugin
    __plugin = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'plugin'), 'plugin',
        '__AbsentNamespace0_CTD_ANON_75_plugin', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/plugin.xsd', 9, 2), )
    plugin = property(__plugin.value, __plugin.set, None, None)
    # Element texture uses Python identifier texture
    __texture = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'texture'), 'texture',
        '__AbsentNamespace0_CTD_ANON_75_texture', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/projector.xsd', 9, 8), )
    texture = property(__texture.value, __texture.set, None, '\n Texture name\n ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
        '__AbsentNamespace0_CTD_ANON_75_pose', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/projector.xsd', 18, 8), )
    pose = property(__pose.value, __pose.set, None, '\n Pose of the projector\n ')
    # Element fov uses Python identifier fov
    __fov = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'fov'), 'fov',
        '__AbsentNamespace0_CTD_ANON_75_fov', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/projector.xsd', 27, 8), )
    fov = property(__fov.value, __fov.set, None, '\n Field of view\n ')
    # Element near_clip uses Python identifier near_clip
    __near_clip = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'near_clip'), 'near_clip',
        '__AbsentNamespace0_CTD_ANON_75_near_clip', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/projector.xsd', 36, 8), )
    near_clip = property(__near_clip.value, __near_clip.set, None, '\n Near clip distance\n ')
    # Element far_clip uses Python identifier far_clip
    __far_clip = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'far_clip'), 'far_clip',
        '__AbsentNamespace0_CTD_ANON_75_far_clip', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/projector.xsd', 45, 8), )
    far_clip = property(__far_clip.value, __far_clip.set, None, '\n far clip distance\n ')
    # Attribute name uses Python identifier name (required by the schema)
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_75_name', pyxb.binding.datatypes.string,
        required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 55, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 55, 6)
    name = property(__name.value, __name.set, None, '\n Name of the projector\n ')
    # Map the XML element/attribute names to the declarations above.
    _ElementMap.update({
        __plugin.name(): __plugin,
        __texture.name(): __texture,
        __pose.name(): __pose,
        __fov.name(): __fov,
        __near_clip.name(): __near_clip,
        __far_clip.name(): __far_clip
    })
    _AttributeMap.update({
        __name.name(): __name
    })
# Publish the binding in the module-level type registry.
_module_typeBindings.CTD_ANON_75 = CTD_ANON_75
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_76(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous complex type of <ray> in ray.xsd:
    # child elements <scan>, <range>, <noise>. Auto-generated code — do not edit
    # by hand; the mangled private names (e.g. _CTD_ANON_76__scan) are referenced
    # by the string keys passed to ElementDeclaration.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element scan uses Python identifier scan
    __scan = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'scan'), 'scan',
                                                     '__AbsentNamespace0_CTD_ANON_76_scan', True,
                                                     pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                 13, 8), )
    scan = property(__scan.value, __scan.set, None, None)
    # Element range uses Python identifier range
    __range = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'range'), 'range',
                                                      '__AbsentNamespace0_CTD_ANON_76_range', True,
                                                      pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                  99, 8), )
    range = property(__range.value, __range.set, None,
                     '\n specifies range properties of each simulated ray\n ')
    # Element noise uses Python identifier noise
    __noise = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'noise'), 'noise',
                                                      '__AbsentNamespace0_CTD_ANON_76_noise', True,
                                                      pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                  139, 8), )
    noise = property(__noise.value, __noise.set, None,
                     '\n The properties of the noise model that should be applied to generated scans\n ')
    _ElementMap.update({
        __scan.name(): __scan,
        __range.name(): __range,
        __noise.name(): __noise
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_76 = CTD_ANON_76
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_77(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <scan> type in ray.xsd:
    # child elements <horizontal> and <vertical>. Auto-generated — do not edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 14, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element horizontal uses Python identifier horizontal
    __horizontal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'horizontal'),
                                                           'horizontal', '__AbsentNamespace0_CTD_ANON_77_horizontal',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/ray.xsd', 17, 14), )
    horizontal = property(__horizontal.value, __horizontal.set, None, None)
    # Element vertical uses Python identifier vertical
    __vertical = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'vertical'), 'vertical',
                                                         '__AbsentNamespace0_CTD_ANON_77_vertical', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/ray.xsd', 56, 14), )
    vertical = property(__vertical.value, __vertical.set, None, None)
    _ElementMap.update({
        __horizontal.name(): __horizontal,
        __vertical.name(): __vertical
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_77 = CTD_ANON_77
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_78(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <horizontal> scan type in ray.xsd:
    # child elements <samples>, <resolution>, <min_angle>, <max_angle>.
    # Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 18, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element samples uses Python identifier samples
    __samples = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'samples'), 'samples',
                                                        '__AbsentNamespace0_CTD_ANON_78_samples', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/ray.xsd', 21, 20), )
    samples = property(__samples.value, __samples.set, None,
                       '\n The number of simulated rays to generate per complete laser sweep cycle.\n ')
    # Element resolution uses Python identifier resolution
    __resolution = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'resolution'),
                                                           'resolution', '__AbsentNamespace0_CTD_ANON_78_resolution',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/ray.xsd', 30, 20), )
    resolution = property(__resolution.value, __resolution.set, None,
                          '\n This number is multiplied by samples to determine the number of range data points returned. If resolution is less than one, range data is interpolated. If resolution is greater than one, range data is averaged.\n ')
    # Element min_angle uses Python identifier min_angle
    __min_angle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min_angle'), 'min_angle',
                                                          '__AbsentNamespace0_CTD_ANON_78_min_angle', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/ray.xsd', 39, 20), )
    min_angle = property(__min_angle.value, __min_angle.set, None, None)
    # Element max_angle uses Python identifier max_angle
    __max_angle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max_angle'), 'max_angle',
                                                          '__AbsentNamespace0_CTD_ANON_78_max_angle', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/ray.xsd', 43, 20), )
    max_angle = property(__max_angle.value, __max_angle.set, None,
                         '\n Must be greater or equal to min_angle\n ')
    _ElementMap.update({
        __samples.name(): __samples,
        __resolution.name(): __resolution,
        __min_angle.name(): __min_angle,
        __max_angle.name(): __max_angle
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_78 = CTD_ANON_78
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_79(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <vertical> scan type in ray.xsd:
    # structurally identical to the horizontal scan type (samples, resolution,
    # min_angle, max_angle). Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 57, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element samples uses Python identifier samples
    __samples = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'samples'), 'samples',
                                                        '__AbsentNamespace0_CTD_ANON_79_samples', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/ray.xsd', 60, 20), )
    samples = property(__samples.value, __samples.set, None,
                       '\n The number of simulated rays to generate per complete laser sweep cycle.\n ')
    # Element resolution uses Python identifier resolution
    __resolution = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'resolution'),
                                                           'resolution', '__AbsentNamespace0_CTD_ANON_79_resolution',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/ray.xsd', 69, 20), )
    resolution = property(__resolution.value, __resolution.set, None,
                          '\n This number is multiplied by samples to determine the number of range data points returned. If resolution is less than one, range data is interpolated. If resolution is greater than one, range data is averaged.\n ')
    # Element min_angle uses Python identifier min_angle
    __min_angle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min_angle'), 'min_angle',
                                                          '__AbsentNamespace0_CTD_ANON_79_min_angle', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/ray.xsd', 78, 20), )
    min_angle = property(__min_angle.value, __min_angle.set, None, None)
    # Element max_angle uses Python identifier max_angle
    __max_angle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max_angle'), 'max_angle',
                                                          '__AbsentNamespace0_CTD_ANON_79_max_angle', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/ray.xsd', 82, 20), )
    max_angle = property(__max_angle.value, __max_angle.set, None,
                         '\n Must be greater or equal to min_angle\n ')
    _ElementMap.update({
        __samples.name(): __samples,
        __resolution.name(): __resolution,
        __min_angle.name(): __min_angle,
        __max_angle.name(): __max_angle
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_79 = CTD_ANON_79
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_80(pyxb.binding.basis.complexTypeDefinition):
    """
    specifies range properties of each simulated ray
    """
    # PyXB-generated binding for the anonymous <range> type in ray.xsd:
    # child elements <min>, <max>, <resolution>. Auto-generated — do not edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 105, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element min uses Python identifier min
    __min = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min'), 'min',
                                                    '__AbsentNamespace0_CTD_ANON_80_min', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                108, 14), )
    min = property(__min.value, __min.set, None,
                   '\n The minimum distance for each ray.\n ')
    # Element max uses Python identifier max
    __max = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max'), 'max',
                                                    '__AbsentNamespace0_CTD_ANON_80_max', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                117, 14), )
    max = property(__max.value, __max.set, None,
                   '\n The maximum distance for each ray.\n ')
    # Element resolution uses Python identifier resolution
    __resolution = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'resolution'),
                                                           'resolution', '__AbsentNamespace0_CTD_ANON_80_resolution',
                                                           True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/ray.xsd', 126, 14), )
    resolution = property(__resolution.value, __resolution.set, None,
                          '\n Linear resolution of each ray.\n ')
    _ElementMap.update({
        __min.name(): __min,
        __max.name(): __max,
        __resolution.name(): __resolution
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_80 = CTD_ANON_80
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_81(pyxb.binding.basis.complexTypeDefinition):
    """
    The properties of the noise model that should be applied to generated scans
    """
    # PyXB-generated binding for the anonymous <noise> type in ray.xsd:
    # child elements <type>, <mean>, <stddev>. Auto-generated — do not edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 145, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element type uses Python identifier type
    __type = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'type'), 'type',
                                                     '__AbsentNamespace0_CTD_ANON_81_type', True,
                                                     pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                 148, 14), )
    type = property(__type.value, __type.set, None,
                    '\n The type of noise. Currently supported types are: "gaussian" (draw noise values independently for each beam from a Gaussian distribution).\n ')
    # Element mean uses Python identifier mean
    __mean = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mean'), 'mean',
                                                     '__AbsentNamespace0_CTD_ANON_81_mean', True,
                                                     pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                 157, 14), )
    mean = property(__mean.value, __mean.set, None,
                    '\n For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n ')
    # Element stddev uses Python identifier stddev
    __stddev = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stddev'), 'stddev',
                                                       '__AbsentNamespace0_CTD_ANON_81_stddev', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/ray.xsd', 166, 14), )
    stddev = property(__stddev.value, __stddev.set, None,
                      '\n For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n ')
    _ElementMap.update({
        __type.name(): __type,
        __mean.name(): __mean,
        __stddev.name(): __stddev
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_81 = CTD_ANON_81
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_82(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <sensor> type in sensor.xsd.
    # Children cover every sensor flavor (altimeter, camera, contact,
    # force_torque, gps, imu, logical_camera, magnetometer, plugin, ray,
    # rfid/rfidtag, sonar, transceiver) plus common sensor settings
    # (always_on, update_rate, visualize, pose, topic) and the required
    # name/type attributes. Note the sensor-flavor elements are declared in
    # the schema Namespace while the common settings use an absent namespace.
    # Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 24, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element altimeter uses Python identifier altimeter
    __altimeter = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'altimeter'),
                                                          'altimeter', '__AbsentNamespace0_CTD_ANON_82_altimeter', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/altimeter.xsd', 9, 2), )
    altimeter = property(__altimeter.value, __altimeter.set, None, None)
    # Element camera uses Python identifier camera
    __camera = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'camera'), 'camera',
                                                       '__AbsentNamespace0_CTD_ANON_82_camera', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/camera.xsd', 9, 2), )
    camera = property(__camera.value, __camera.set, None, None)
    # Element contact uses Python identifier contact
    __contact = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'contact'), 'contact',
                                                        '__AbsentNamespace0_CTD_ANON_82_contact', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/contact.xsd', 9, 2), )
    contact = property(__contact.value, __contact.set, None, None)
    # Element force_torque uses Python identifier force_torque
    __force_torque = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'force_torque'),
                                                             'force_torque',
                                                             '__AbsentNamespace0_CTD_ANON_82_force_torque', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/forcetorque.xsd', 9, 2), )
    force_torque = property(__force_torque.value, __force_torque.set, None, None)
    # Element gps uses Python identifier gps
    __gps = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gps'), 'gps',
                                                    '__AbsentNamespace0_CTD_ANON_82_gps', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd',
                                                                                9, 2), )
    gps = property(__gps.value, __gps.set, None, None)
    # Element imu uses Python identifier imu
    __imu = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'imu'), 'imu',
                                                    '__AbsentNamespace0_CTD_ANON_82_imu', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd',
                                                                                9, 2), )
    imu = property(__imu.value, __imu.set, None, None)
    # Element logical_camera uses Python identifier logical_camera
    __logical_camera = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'logical_camera'),
                                                               'logical_camera',
                                                               '__AbsentNamespace0_CTD_ANON_82_logical_camera', True,
                                                               pyxb.utils.utility.Location(
                                                                   'http://sdformat.org/schemas/logical_camera.xsd', 9,
                                                                   2), )
    logical_camera = property(__logical_camera.value, __logical_camera.set, None, None)
    # Element magnetometer uses Python identifier magnetometer
    __magnetometer = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'magnetometer'),
                                                             'magnetometer',
                                                             '__AbsentNamespace0_CTD_ANON_82_magnetometer', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/magnetometer.xsd', 9,
                                                                 2), )
    magnetometer = property(__magnetometer.value, __magnetometer.set, None, None)
    # Element plugin uses Python identifier plugin
    __plugin = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'plugin'), 'plugin',
                                                       '__AbsentNamespace0_CTD_ANON_82_plugin', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/plugin.xsd', 9, 2), )
    plugin = property(__plugin.value, __plugin.set, None, None)
    # Element ray uses Python identifier ray
    __ray = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ray'), 'ray',
                                                    '__AbsentNamespace0_CTD_ANON_82_ray', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd',
                                                                                9, 2), )
    ray = property(__ray.value, __ray.set, None, None)
    # Element rfidtag uses Python identifier rfidtag
    __rfidtag = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'rfidtag'), 'rfidtag',
                                                        '__AbsentNamespace0_CTD_ANON_82_rfidtag', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/rfid.xsd', 4, 2), )
    rfidtag = property(__rfidtag.value, __rfidtag.set, None, None)
    # Element rfid uses Python identifier rfid
    __rfid = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'rfid'), 'rfid',
                                                     '__AbsentNamespace0_CTD_ANON_82_rfid', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/rfidtag.xsd', 4, 2), )
    rfid = property(__rfid.value, __rfid.set, None, None)
    # Element always_on uses Python identifier always_on
    __always_on = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'always_on'), 'always_on',
                                                          '__AbsentNamespace0_CTD_ANON_82_always_on', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/sensor.xsd', 27, 8), )
    always_on = property(__always_on.value, __always_on.set, None,
                         '\n If true the sensor will always be updated according to the update rate.\n ')
    # Element update_rate uses Python identifier update_rate
    __update_rate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'update_rate'),
                                                            'update_rate', '__AbsentNamespace0_CTD_ANON_82_update_rate',
                                                            True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/sensor.xsd', 36, 8), )
    update_rate = property(__update_rate.value, __update_rate.set, None,
                           '\n The frequency at which the sensor data is generated. If left unspecified, the sensor will generate data every cycle.\n ')
    # Element visualize uses Python identifier visualize
    __visualize = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'visualize'), 'visualize',
                                                          '__AbsentNamespace0_CTD_ANON_82_visualize', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/sensor.xsd', 45, 8), )
    visualize = property(__visualize.value, __visualize.set, None,
                         '\n If true, the sensor is visualized in the GUI\n ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
                                                     '__AbsentNamespace0_CTD_ANON_82_pose', True,
                                                     pyxb.utils.utility.Location(
                                                         'http://sdformat.org/schemas/sensor.xsd', 54, 8), )
    pose = property(__pose.value, __pose.set, None,
                    '\n This is the pose of the sensor, relative to the parent (link or joint) reference frame.\n ')
    # Element topic uses Python identifier topic
    __topic = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'topic'), 'topic',
                                                      '__AbsentNamespace0_CTD_ANON_82_topic', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/sensor.xsd', 63, 8), )
    topic = property(__topic.value, __topic.set, None,
                     '\n Name of the topic on which data is published. This is necessary for visualization\n ')
    # Element sonar uses Python identifier sonar
    __sonar = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sonar'), 'sonar',
                                                      '__AbsentNamespace0_CTD_ANON_82_sonar', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/sonar.xsd', 9, 2), )
    sonar = property(__sonar.value, __sonar.set, None, None)
    # Element transceiver uses Python identifier transceiver
    __transceiver = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transceiver'),
                                                            'transceiver', '__AbsentNamespace0_CTD_ANON_82_transceiver',
                                                            True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 9, 2), )
    transceiver = property(__transceiver.value, __transceiver.set, None, None)
    # Attribute name uses Python identifier name
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
                                               '__AbsentNamespace0_CTD_ANON_82_name', pyxb.binding.datatypes.string,
                                               required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 86, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 86, 6)
    name = property(__name.value, __name.set, None,
                    '\n A unique name for the sensor. This name must not match another model in the model.\n ')
    # Attribute type uses Python identifier type
    __type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type',
                                               '__AbsentNamespace0_CTD_ANON_82_type', pyxb.binding.datatypes.string,
                                               required=True)
    __type._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 93, 6)
    __type._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 93, 6)
    type = property(__type.value, __type.set, None,
                    '\n The type name of the sensor. By default, SDF supports types\n altimeter,\n camera,\n contact,\n depth,\n force_torque,\n gps,\n gpu_ray,\n imu,\n logical_camera,\n magnetometer,\n multicamera,\n ray,\n rfid,\n rfidtag,\n sonar,\n wireless_receiver, and\n wireless_transmitter.\n ')
    _ElementMap.update({
        __altimeter.name(): __altimeter,
        __camera.name(): __camera,
        __contact.name(): __contact,
        __force_torque.name(): __force_torque,
        __gps.name(): __gps,
        __imu.name(): __imu,
        __logical_camera.name(): __logical_camera,
        __magnetometer.name(): __magnetometer,
        __plugin.name(): __plugin,
        __ray.name(): __ray,
        __rfidtag.name(): __rfidtag,
        __rfid.name(): __rfid,
        __always_on.name(): __always_on,
        __update_rate.name(): __update_rate,
        __visualize.name(): __visualize,
        __pose.name(): __pose,
        __topic.name(): __topic,
        __sonar.name(): __sonar,
        __transceiver.name(): __transceiver
    })
    _AttributeMap.update({
        __name.name(): __name,
        __type.name(): __type
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_82 = CTD_ANON_82
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_83(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <sonar> type in sonar.xsd:
    # child elements <min>, <max>, <radius>. Auto-generated — do not edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element min uses Python identifier min
    __min = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min'), 'min',
                                                    '__AbsentNamespace0_CTD_ANON_83_min', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd',
                                                                                13, 8), )
    min = property(__min.value, __min.set, None, '\n Minimum range\n ')
    # Element max uses Python identifier max
    __max = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max'), 'max',
                                                    '__AbsentNamespace0_CTD_ANON_83_max', True,
                                                    pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd',
                                                                                22, 8), )
    max = property(__max.value, __max.set, None, '\n Max range\n ')
    # Element radius uses Python identifier radius
    __radius = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'radius'), 'radius',
                                                       '__AbsentNamespace0_CTD_ANON_83_radius', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/sonar.xsd', 31, 8), )
    radius = property(__radius.value, __radius.set, None,
                      '\n Radius of the sonar cone at max range.\n ')
    _ElementMap.update({
        __min.name(): __min,
        __max.name(): __max,
        __radius.name(): __radius
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_83 = CTD_ANON_83
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_84(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <sphere> shape type in
    # sphere_shape.xsd: single child element <radius>. Auto-generated — do not edit.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/sphere_shape.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element radius uses Python identifier radius
    __radius = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'radius'), 'radius',
                                                       '__AbsentNamespace0_CTD_ANON_84_radius', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/sphere_shape.xsd', 13, 8), )
    radius = property(__radius.value, __radius.set, None, '\n radius of the sphere\n ')
    _ElementMap.update({
        __radius.name(): __radius
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_84 = CTD_ANON_84
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_85(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <surface> type in surface.xsd:
    # child elements <bounce>, <friction>, <contact>, <soft_contact>.
    # Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element bounce uses Python identifier bounce
    __bounce = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bounce'), 'bounce',
                                                       '__AbsentNamespace0_CTD_ANON_85_bounce', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/surface.xsd', 13, 8), )
    bounce = property(__bounce.value, __bounce.set, None, None)
    # Element friction uses Python identifier friction
    __friction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'friction'), 'friction',
                                                         '__AbsentNamespace0_CTD_ANON_85_friction', True,
                                                         pyxb.utils.utility.Location(
                                                             'http://sdformat.org/schemas/surface.xsd', 39, 8), )
    friction = property(__friction.value, __friction.set, None, None)
    # Element contact uses Python identifier contact
    __contact = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'contact'), 'contact',
                                                        '__AbsentNamespace0_CTD_ANON_85_contact', True,
                                                        pyxb.utils.utility.Location(
                                                            'http://sdformat.org/schemas/surface.xsd', 149, 8), )
    contact = property(__contact.value, __contact.set, None, None)
    # Element soft_contact uses Python identifier soft_contact
    __soft_contact = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'soft_contact'),
                                                             'soft_contact',
                                                             '__AbsentNamespace0_CTD_ANON_85_soft_contact', True,
                                                             pyxb.utils.utility.Location(
                                                                 'http://sdformat.org/schemas/surface.xsd', 318, 8), )
    soft_contact = property(__soft_contact.value, __soft_contact.set, None, None)
    _ElementMap.update({
        __bounce.name(): __bounce,
        __friction.name(): __friction,
        __contact.name(): __contact,
        __soft_contact.name(): __soft_contact
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_85 = CTD_ANON_85
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_86(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <bounce> type in surface.xsd:
    # child elements <restitution_coefficient>, <threshold>.
    # Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 14, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element restitution_coefficient uses Python identifier restitution_coefficient
    __restitution_coefficient = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'restitution_coefficient'), 'restitution_coefficient',
        '__AbsentNamespace0_CTD_ANON_86_restitution_coefficient', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 17, 14), )
    restitution_coefficient = property(__restitution_coefficient.value, __restitution_coefficient.set, None,
                                       '\n Bounciness coefficient of restitution, from [0...1], where 0=no bounciness.\n ')
    # Element threshold uses Python identifier threshold
    __threshold = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'threshold'), 'threshold',
                                                          '__AbsentNamespace0_CTD_ANON_86_threshold', True,
                                                          pyxb.utils.utility.Location(
                                                              'http://sdformat.org/schemas/surface.xsd', 26, 14), )
    threshold = property(__threshold.value, __threshold.set, None,
                         '\n Bounce capture velocity, below which effective coefficient of restitution is 0.\n ')
    _ElementMap.update({
        __restitution_coefficient.name(): __restitution_coefficient,
        __threshold.name(): __threshold
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_86 = CTD_ANON_86
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_87(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # PyXB-generated binding for the anonymous <friction> type in surface.xsd:
    # child elements <ode> and <bullet> (per-physics-engine friction settings).
    # Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 40, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element ode uses Python identifier ode
    __ode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ode'), 'ode',
                                                    '__AbsentNamespace0_CTD_ANON_87_ode', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/surface.xsd', 43, 14), )
    ode = property(__ode.value, __ode.set, None, '\n ODE friction parameters\n ')
    # Element bullet uses Python identifier bullet
    __bullet = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bullet'), 'bullet',
                                                       '__AbsentNamespace0_CTD_ANON_87_bullet', True,
                                                       pyxb.utils.utility.Location(
                                                           'http://sdformat.org/schemas/surface.xsd', 101, 14), )
    bullet = property(__bullet.value, __bullet.set, None, None)
    _ElementMap.update({
        __ode.name(): __ode,
        __bullet.name(): __bullet
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_87 = CTD_ANON_87
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_88(pyxb.binding.basis.complexTypeDefinition):
    """
    ODE friction parameters
    """
    # PyXB-generated binding for the anonymous ODE friction type in surface.xsd:
    # child elements <mu>, <mu2>, <fdir1>, <slip1>, <slip2>.
    # Auto-generated — do not edit by hand.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 49, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element mu uses Python identifier mu
    __mu = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mu'), 'mu',
                                                   '__AbsentNamespace0_CTD_ANON_88_mu', True,
                                                   pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/surface.xsd', 52, 20), )
    mu = property(__mu.value, __mu.set, None,
                  '\n Coefficient of friction in the range of [0..1].\n ')
    # Element mu2 uses Python identifier mu2
    __mu2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'mu2'), 'mu2',
                                                    '__AbsentNamespace0_CTD_ANON_88_mu2', True,
                                                    pyxb.utils.utility.Location(
                                                        'http://sdformat.org/schemas/surface.xsd', 61, 20), )
    mu2 = property(__mu2.value, __mu2.set, None,
                   '\n Second coefficient of friction in the range of [0..1]\n ')
    # Element fdir1 uses Python identifier fdir1
    __fdir1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'fdir1'), 'fdir1',
                                                      '__AbsentNamespace0_CTD_ANON_88_fdir1', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/surface.xsd', 70, 20), )
    fdir1 = property(__fdir1.value, __fdir1.set, None,
                     '\n 3-tuple specifying direction of mu1 in the collision local reference frame.\n ')
    # Element slip1 uses Python identifier slip1
    __slip1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'slip1'), 'slip1',
                                                      '__AbsentNamespace0_CTD_ANON_88_slip1', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/surface.xsd', 79, 20), )
    slip1 = property(__slip1.value, __slip1.set, None,
                     '\n Force dependent slip direction 1 in collision local frame, between the range of [0..1].\n ')
    # Element slip2 uses Python identifier slip2
    __slip2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'slip2'), 'slip2',
                                                      '__AbsentNamespace0_CTD_ANON_88_slip2', True,
                                                      pyxb.utils.utility.Location(
                                                          'http://sdformat.org/schemas/surface.xsd', 88, 20), )
    slip2 = property(__slip2.value, __slip2.set, None,
                     '\n Force dependent slip direction 2 in collision local frame, between the range of [0..1].\n ')
    _ElementMap.update({
        __mu.name(): __mu,
        __mu2.name(): __mu2,
        __fdir1.name(): __fdir1,
        __slip1.name(): __slip1,
        __slip2.name(): __slip2
    })
    _AttributeMap.update({
    })
# Register the binding so pyxb can resolve the anonymous type by name.
_module_typeBindings.CTD_ANON_88 = CTD_ANON_88
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_89(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for an anonymous complex type from surface.xsd.

    Groups surface friction parameters: primary/secondary friction
    coefficients (``friction``, ``friction2``), the friction direction
    vector (``fdir1``) and ``rolling_friction``.

    NOTE: auto-generated by PyXB from the SDFormat schemas; do not edit
    by hand beyond regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 102, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element friction uses Python identifier friction
    __friction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'friction'), 'friction',
        '__AbsentNamespace0_CTD_ANON_89_friction', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 105, 20), )
    friction = property(__friction.value, __friction.set, None,
        '\n Coefficient of friction in the range of [0..1].\n ')
    # Element friction2 uses Python identifier friction2
    __friction2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'friction2'), 'friction2',
        '__AbsentNamespace0_CTD_ANON_89_friction2', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 114, 20), )
    friction2 = property(__friction2.value, __friction2.set, None,
        '\n Coefficient of friction in the range of [0..1].\n ')
    # Element fdir1 uses Python identifier fdir1
    __fdir1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'fdir1'), 'fdir1',
        '__AbsentNamespace0_CTD_ANON_89_fdir1', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 123, 20), )
    fdir1 = property(__fdir1.value, __fdir1.set, None,
        '\n 3-tuple specifying direction of mu1 in the collision local reference frame.\n ')
    # Element rolling_friction uses Python identifier rolling_friction
    __rolling_friction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'rolling_friction'),
        'rolling_friction',
        '__AbsentNamespace0_CTD_ANON_89_rolling_friction',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 132, 20), )
    rolling_friction = property(__rolling_friction.value, __rolling_friction.set, None,
        '\n coefficient of friction in the range of [0..1]\n ')
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __friction.name(): __friction,
        __friction2.name(): __friction2,
        __fdir1.name(): __fdir1,
        __rolling_friction.name(): __rolling_friction
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_89 = CTD_ANON_89
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_90(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for an anonymous complex type from surface.xsd.

    Groups contact-related collision settings: collide-without-contact
    flags and bitmasks, plus engine-specific ``ode`` and ``bullet``
    contact parameter sub-elements.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 150, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element collide_without_contact uses Python identifier collide_without_contact
    __collide_without_contact = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'collide_without_contact'), 'collide_without_contact',
        '__AbsentNamespace0_CTD_ANON_90_collide_without_contact', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 153, 14), )
    collide_without_contact = property(__collide_without_contact.value, __collide_without_contact.set, None,
        '\n Flag to disable contact force generation, while still allowing collision checks and contact visualization to occur.\n ')
    # Element collide_without_contact_bitmask uses Python identifier collide_without_contact_bitmask
    __collide_without_contact_bitmask = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'collide_without_contact_bitmask'), 'collide_without_contact_bitmask',
        '__AbsentNamespace0_CTD_ANON_90_collide_without_contact_bitmask', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 162, 14), )
    collide_without_contact_bitmask = property(__collide_without_contact_bitmask.value,
        __collide_without_contact_bitmask.set, None,
        '\n Bitmask for collision filtering when collide_without_contact is on \n ')
    # Element collide_bitmask uses Python identifier collide_bitmask
    __collide_bitmask = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'collide_bitmask'),
        'collide_bitmask',
        '__AbsentNamespace0_CTD_ANON_90_collide_bitmask', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 171,
            14), )
    collide_bitmask = property(__collide_bitmask.value, __collide_bitmask.set, None,
        '\n Bitmask for collision filtering. This will override collide_without_contact\n ')
    # Element ode uses Python identifier ode
    __ode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'ode'), 'ode',
        '__AbsentNamespace0_CTD_ANON_90_ode', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 180, 14), )
    ode = property(__ode.value, __ode.set, None, '\n ODE contact parameters\n ')
    # Element bullet uses Python identifier bullet
    __bullet = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bullet'), 'bullet',
        '__AbsentNamespace0_CTD_ANON_90_bullet', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 247, 14), )
    bullet = property(__bullet.value, __bullet.set, None,
        '\n Bullet contact parameters\n ')
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __collide_without_contact.name(): __collide_without_contact,
        __collide_without_contact_bitmask.name(): __collide_without_contact_bitmask,
        __collide_bitmask.name(): __collide_bitmask,
        __ode.name(): __ode,
        __bullet.name(): __bullet
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_90 = CTD_ANON_90
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_91(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for the ODE contact-parameter block in surface.xsd.

    Elements: soft_cfm, soft_erp, kp, kd, max_vel, min_depth.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 186, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element soft_cfm uses Python identifier soft_cfm
    __soft_cfm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'soft_cfm'), 'soft_cfm',
        '__AbsentNamespace0_CTD_ANON_91_soft_cfm', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 189, 20), )
    soft_cfm = property(__soft_cfm.value, __soft_cfm.set, None,
        '\n Soft constraint force mixing.\n ')
    # Element soft_erp uses Python identifier soft_erp
    __soft_erp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'soft_erp'), 'soft_erp',
        '__AbsentNamespace0_CTD_ANON_91_soft_erp', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 198, 20), )
    soft_erp = property(__soft_erp.value, __soft_erp.set, None,
        '\n Soft error reduction parameter\n ')
    # Element kp uses Python identifier kp
    __kp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'kp'), 'kp',
        '__AbsentNamespace0_CTD_ANON_91_kp', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 207, 20), )
    kp = property(__kp.value, __kp.set, None,
        '\n dynamically "stiffness"-equivalent coefficient for contact joints\n ')
    # Element kd uses Python identifier kd
    __kd = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'kd'), 'kd',
        '__AbsentNamespace0_CTD_ANON_91_kd', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 216, 20), )
    kd = property(__kd.value, __kd.set, None,
        '\n dynamically "damping"-equivalent coefficient for contact joints\n ')
    # Element max_vel uses Python identifier max_vel
    __max_vel = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max_vel'), 'max_vel',
        '__AbsentNamespace0_CTD_ANON_91_max_vel', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 225, 20), )
    max_vel = property(__max_vel.value, __max_vel.set, None,
        '\n maximum contact correction velocity truncation term.\n ')
    # Element min_depth uses Python identifier min_depth
    __min_depth = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min_depth'), 'min_depth',
        '__AbsentNamespace0_CTD_ANON_91_min_depth', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 234, 20), )
    min_depth = property(__min_depth.value, __min_depth.set, None,
        '\n minimum allowable depth before contact correction impulse is applied\n ')
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __soft_cfm.name(): __soft_cfm,
        __soft_erp.name(): __soft_erp,
        __kp.name(): __kp,
        __kd.name(): __kd,
        __max_vel.name(): __max_vel,
        __min_depth.name(): __min_depth
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_91 = CTD_ANON_91
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_92(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for the Bullet contact-parameter block in surface.xsd.

    Elements: soft_cfm, soft_erp, kp, kd, split_impulse,
    split_impulse_penetration_threshold.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 253, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element soft_cfm uses Python identifier soft_cfm
    __soft_cfm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'soft_cfm'), 'soft_cfm',
        '__AbsentNamespace0_CTD_ANON_92_soft_cfm', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 256, 20), )
    soft_cfm = property(__soft_cfm.value, __soft_cfm.set, None,
        '\n Soft constraint force mixing.\n ')
    # Element soft_erp uses Python identifier soft_erp
    __soft_erp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'soft_erp'), 'soft_erp',
        '__AbsentNamespace0_CTD_ANON_92_soft_erp', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 265, 20), )
    soft_erp = property(__soft_erp.value, __soft_erp.set, None,
        '\n Soft error reduction parameter\n ')
    # Element kp uses Python identifier kp
    __kp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'kp'), 'kp',
        '__AbsentNamespace0_CTD_ANON_92_kp', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 274, 20), )
    kp = property(__kp.value, __kp.set, None,
        '\n dynamically "stiffness"-equivalent coefficient for contact joints\n ')
    # Element kd uses Python identifier kd
    __kd = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'kd'), 'kd',
        '__AbsentNamespace0_CTD_ANON_92_kd', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 283, 20), )
    kd = property(__kd.value, __kd.set, None,
        '\n dynamically "damping"-equivalent coefficient for contact joints\n ')
    # Element split_impulse uses Python identifier split_impulse
    __split_impulse = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'split_impulse'),
        'split_impulse',
        '__AbsentNamespace0_CTD_ANON_92_split_impulse', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 292, 20), )
    split_impulse = property(__split_impulse.value, __split_impulse.set, None,
        "\n Similar to ODE's max_vel implementation. See http://bulletphysics.org/mediawiki-1.5.8/index.php/BtContactSolverInfo#Split_Impulse for more information.\n ")
    # Element split_impulse_penetration_threshold uses Python identifier split_impulse_penetration_threshold
    __split_impulse_penetration_threshold = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'split_impulse_penetration_threshold'), 'split_impulse_penetration_threshold',
        '__AbsentNamespace0_CTD_ANON_92_split_impulse_penetration_threshold', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 301, 20), )
    split_impulse_penetration_threshold = property(__split_impulse_penetration_threshold.value,
        __split_impulse_penetration_threshold.set, None,
        "\n Similar to ODE's max_vel implementation. See http://bulletphysics.org/mediawiki-1.5.8/index.php/BtContactSolverInfo#Split_Impulse for more information.\n ")
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __soft_cfm.name(): __soft_cfm,
        __soft_erp.name(): __soft_erp,
        __kp.name(): __kp,
        __kd.name(): __kd,
        __split_impulse.name(): __split_impulse,
        __split_impulse_penetration_threshold.name(): __split_impulse_penetration_threshold
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_92 = CTD_ANON_92
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_93(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for an anonymous complex type from surface.xsd.

    Wrapper holding the single ``dart`` soft-contact sub-element.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 319, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element dart uses Python identifier dart
    __dart = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'dart'), 'dart',
        '__AbsentNamespace0_CTD_ANON_93_dart', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 322, 14), )
    dart = property(__dart.value, __dart.set, None,
        '\n soft contact pamameters based on paper:\n http://www.cc.gatech.edu/graphics/projects/Sumit/homepage/papers/sigasia11/jain_softcontacts_siga11.pdf\n \n ')
    # Map the XML element name to its ElementDeclaration object.
    _ElementMap.update({
        __dart.name(): __dart
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_93 = CTD_ANON_93
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_94(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for the DART soft-contact parameter block (surface.xsd).

    Elements: bone_attachment, stiffness, damping, flesh_mass_fraction.
    Parameters follow the soft-contacts paper referenced in the schema
    (jain_softcontacts_siga11).

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 330, 16)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element bone_attachment uses Python identifier bone_attachment
    __bone_attachment = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bone_attachment'),
        'bone_attachment',
        '__AbsentNamespace0_CTD_ANON_94_bone_attachment', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 333,
            20), )
    bone_attachment = property(__bone_attachment.value, __bone_attachment.set, None,
        '\n This is variable k_v in the soft contacts paper. Its unit is N/m.\n ')
    # Element stiffness uses Python identifier stiffness
    __stiffness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'stiffness'), 'stiffness',
        '__AbsentNamespace0_CTD_ANON_94_stiffness', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 342, 20), )
    stiffness = property(__stiffness.value, __stiffness.set, None,
        '\n This is variable k_e in the soft contacts paper. Its unit is N/m.\n ')
    # Element damping uses Python identifier damping
    __damping = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'damping'), 'damping',
        '__AbsentNamespace0_CTD_ANON_94_damping', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/surface.xsd', 351, 20), )
    damping = property(__damping.value, __damping.set, None,
        '\n Viscous damping of point velocity in body frame. Its unit is N/m/s.\n ')
    # Element flesh_mass_fraction uses Python identifier flesh_mass_fraction
    __flesh_mass_fraction = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'flesh_mass_fraction'), 'flesh_mass_fraction',
        '__AbsentNamespace0_CTD_ANON_94_flesh_mass_fraction', True,
        pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 360, 20), )
    flesh_mass_fraction = property(__flesh_mass_fraction.value, __flesh_mass_fraction.set, None,
        '\n Fraction of mass to be distributed among deformable nodes.\n ')
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __bone_attachment.name(): __bone_attachment,
        __stiffness.name(): __stiffness,
        __damping.name(): __damping,
        __flesh_mass_fraction.name(): __flesh_mass_fraction
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_94 = CTD_ANON_94
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_95(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for an anonymous complex type from transceiver.xsd.

    Wireless transceiver settings: essid, frequency, min/max_frequency,
    gain, power and sensitivity.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 10, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element essid uses Python identifier essid
    __essid = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'essid'), 'essid',
        '__AbsentNamespace0_CTD_ANON_95_essid', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 13, 8), )
    essid = property(__essid.value, __essid.set, None,
        '\n Service set identifier (network name)\n ')
    # Element frequency uses Python identifier frequency
    __frequency = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'frequency'), 'frequency',
        '__AbsentNamespace0_CTD_ANON_95_frequency', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 22, 8), )
    frequency = property(__frequency.value, __frequency.set, None,
        '\n Specifies the frequency of transmission in MHz\n ')
    # Element min_frequency uses Python identifier min_frequency
    __min_frequency = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'min_frequency'),
        'min_frequency',
        '__AbsentNamespace0_CTD_ANON_95_min_frequency', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 31,
            8), )
    min_frequency = property(__min_frequency.value, __min_frequency.set, None,
        '\n Only a frequency range is filtered. Here we set the lower bound (MHz).\n \n ')
    # Element max_frequency uses Python identifier max_frequency
    __max_frequency = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'max_frequency'),
        'max_frequency',
        '__AbsentNamespace0_CTD_ANON_95_max_frequency', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 41,
            8), )
    max_frequency = property(__max_frequency.value, __max_frequency.set, None,
        '\n Only a frequency range is filtered. Here we set the upper bound (MHz).\n \n ')
    # Element gain uses Python identifier gain
    __gain = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'gain'), 'gain',
        '__AbsentNamespace0_CTD_ANON_95_gain', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 51, 8), )
    gain = property(__gain.value, __gain.set, None, '\n Specifies the antenna gain in dBi\n ')
    # Element power uses Python identifier power
    __power = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'power'), 'power',
        '__AbsentNamespace0_CTD_ANON_95_power', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 60, 8), )
    power = property(__power.value, __power.set, None,
        '\n Specifies the transmission power in dBm\n ')
    # Element sensitivity uses Python identifier sensitivity
    __sensitivity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'sensitivity'),
        'sensitivity', '__AbsentNamespace0_CTD_ANON_95_sensitivity',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/transceiver.xsd', 69, 8), )
    sensitivity = property(__sensitivity.value, __sensitivity.set, None,
        '\n Mininum received signal power in dBm\n ')
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __essid.name(): __essid,
        __frequency.name(): __frequency,
        __min_frequency.name(): __min_frequency,
        __max_frequency.name(): __max_frequency,
        __gain.name(): __gain,
        __power.name(): __power,
        __sensitivity.name(): __sensitivity
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_95 = CTD_ANON_95
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_96(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for the <visual> element's anonymous type (visual.xsd).

    Elements: geometry, material, plugin (namespace-qualified), plus
    cast_shadows, laser_retro, transparency, pose and meta.  Declares one
    required string attribute ``name``.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 13, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element geometry uses Python identifier geometry
    __geometry = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'geometry'), 'geometry',
        '__AbsentNamespace0_CTD_ANON_96_geometry', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/geometry.xsd', 17, 2), )
    geometry = property(__geometry.value, __geometry.set, None, None)
    # Element material uses Python identifier material
    __material = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'material'), 'material',
        '__AbsentNamespace0_CTD_ANON_96_material', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/material.xsd', 9, 2), )
    material = property(__material.value, __material.set, None, None)
    # Element plugin uses Python identifier plugin
    __plugin = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'plugin'), 'plugin',
        '__AbsentNamespace0_CTD_ANON_96_plugin', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/plugin.xsd', 9, 2), )
    plugin = property(__plugin.value, __plugin.set, None, None)
    # Element cast_shadows uses Python identifier cast_shadows
    __cast_shadows = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'cast_shadows'),
        'cast_shadows',
        '__AbsentNamespace0_CTD_ANON_96_cast_shadows', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 16, 8), )
    cast_shadows = property(__cast_shadows.value, __cast_shadows.set, None,
        '\n If true the visual will cast shadows.\n ')
    # Element laser_retro uses Python identifier laser_retro
    __laser_retro = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'laser_retro'),
        'laser_retro', '__AbsentNamespace0_CTD_ANON_96_laser_retro',
        True, pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 25, 8), )
    laser_retro = property(__laser_retro.value, __laser_retro.set, None,
        '\n will be implemented in the future release.\n ')
    # Element transparency uses Python identifier transparency
    __transparency = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'transparency'),
        'transparency',
        '__AbsentNamespace0_CTD_ANON_96_transparency', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 34, 8), )
    transparency = property(__transparency.value, __transparency.set, None,
        '\n The amount of transparency( 0=opaque, 1 = fully transparent)\n ')
    # Element pose uses Python identifier pose
    __pose = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'pose'), 'pose',
        '__AbsentNamespace0_CTD_ANON_96_pose', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 43, 8), )
    pose = property(__pose.value, __pose.set, None,
        '\n The reference frame of the visual element, relative to the reference frame of the link.\n ')
    # Element meta uses Python identifier meta
    __meta = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'meta'), 'meta',
        '__AbsentNamespace0_CTD_ANON_96_meta', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 52, 8), )
    meta = property(__meta.value, __meta.set, None,
        '\n Optional meta information for the visual. The information contained within this element should be used to provide additional feedback to an end user.\n ')
    # Attribute name uses Python identifier name
    __name = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'name'), 'name',
        '__AbsentNamespace0_CTD_ANON_96_name', pyxb.binding.datatypes.string,
        required=True)
    __name._DeclarationLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 77, 6)
    __name._UseLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 77, 6)
    name = property(__name.value, __name.set, None,
        '\n Unique name for the visual element within the scope of the parent link.\n ')
    # Map the XML element names to their ElementDeclaration objects.
    _ElementMap.update({
        __geometry.name(): __geometry,
        __material.name(): __material,
        __plugin.name(): __plugin,
        __cast_shadows.name(): __cast_shadows,
        __laser_retro.name(): __laser_retro,
        __transparency.name(): __transparency,
        __pose.name(): __pose,
        __meta.name(): __meta
    })
    # Map the XML attribute name to its AttributeUse object.
    _AttributeMap.update({
        __name.name(): __name
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_96 = CTD_ANON_96
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_97(pyxb.binding.basis.complexTypeDefinition):
    """PyXB binding for the visual <meta> element's anonymous type (visual.xsd).

    Holds a single ``layer`` element used for display-layer grouping.

    NOTE: auto-generated by PyXB; edit only via schema regeneration.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 58, 10)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element layer uses Python identifier layer
    __layer = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'layer'), 'layer',
        '__AbsentNamespace0_CTD_ANON_97_layer', True,
        pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 61, 14), )
    layer = property(__layer.value, __layer.set, None,
        '\n The layer in which this visual is displayed. The layer number is useful for programs, such as Gazebo, that put visuals in different layers for enhanced visualization.\n ')
    # Map the XML element name to its ElementDeclaration object.
    _ElementMap.update({
        __layer.name(): __layer
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding class in the module-wide type registry.
_module_typeBindings.CTD_ANON_97 = CTD_ANON_97
# ---------------------------------------------------------------------------
# Top-level element bindings: each statement creates a pyxb element binding
# for a global schema element and registers it with the Namespace so that
# documents using that element can be parsed into the classes defined above.
# Auto-generated by PyXB; edit only via schema regeneration.
# ---------------------------------------------------------------------------
audio_sink = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audio_sink'),
    pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/audio_sink.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', audio_sink.name().localName(), audio_sink)
# NOTE(review): the schema locations for rfidtag/rfid below appear swapped
# (rfidtag -> rfid.xsd, rfid -> rfidtag.xsd); this mirrors the generator
# output — confirm against the source schemas before relying on it.
rfidtag = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'rfidtag'), pyxb.binding.datatypes.anyType,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/rfid.xsd', 4, 2))
Namespace.addCategoryObject('elementBinding', rfidtag.name().localName(), rfidtag)
rfid = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'rfid'), pyxb.binding.datatypes.anyType,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/rfidtag.xsd', 4, 2))
Namespace.addCategoryObject('elementBinding', rfid.name().localName(), rfid)
model = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'model'), CTD_ANON,
    location=pyxb.utils.utility.Location(
        '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
        13, 2))
Namespace.addCategoryObject('elementBinding', model.name().localName(), model)
altimeter = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'altimeter'), CTD_ANON_2,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd',
        9, 2))
Namespace.addCategoryObject('elementBinding', altimeter.name().localName(), altimeter)
audio_source = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audio_source'), CTD_ANON_5,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/audio_source.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', audio_source.name().localName(), audio_source)
box = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'box'), CTD_ANON_7,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/box_shape.xsd', 9,
        2))
Namespace.addCategoryObject('elementBinding', box.name().localName(), box)
camera = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'camera'), CTD_ANON_8,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 9,
        2))
Namespace.addCategoryObject('elementBinding', camera.name().localName(), camera)
collision = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'collision'), CTD_ANON_15,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd',
        11, 2))
Namespace.addCategoryObject('elementBinding', collision.name().localName(), collision)
contact = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'contact'), CTD_ANON_16,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/contact.xsd', 9,
        2))
Namespace.addCategoryObject('elementBinding', contact.name().localName(), contact)
cylinder = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'cylinder'), CTD_ANON_17,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/cylinder_shape.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', cylinder.name().localName(), cylinder)
force_torque = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'force_torque'), CTD_ANON_18,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/forcetorque.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', force_torque.name().localName(), force_torque)
geometry = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'geometry'), CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/geometry.xsd',
        17, 2))
Namespace.addCategoryObject('elementBinding', geometry.name().localName(), geometry)
gps = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gps'), CTD_ANON_21,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', gps.name().localName(), gps)
gripper = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gripper'), CTD_ANON_28,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 4,
        2))
Namespace.addCategoryObject('elementBinding', gripper.name().localName(), gripper)
heightmap = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heightmap'), CTD_ANON_30,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/heightmap_shape.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', heightmap.name().localName(), heightmap)
image = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'image'), CTD_ANON_33,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd',
        9, 2))
Namespace.addCategoryObject('elementBinding', image.name().localName(), image)
imu = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'imu'), CTD_ANON_34,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', imu.name().localName(), imu)
inertial = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'inertial'), CTD_ANON_46,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd',
        9, 2))
Namespace.addCategoryObject('elementBinding', inertial.name().localName(), inertial)
joint = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'joint'), CTD_ANON_48,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 10, 2))
Namespace.addCategoryObject('elementBinding', joint.name().localName(), joint)
link = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'link'), CTD_ANON_60,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 16, 2))
Namespace.addCategoryObject('elementBinding', link.name().localName(), link)
logical_camera = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'logical_camera'), CTD_ANON_62,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/logical_camera.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', logical_camera.name().localName(), logical_camera)
magnetometer = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'magnetometer'), CTD_ANON_63,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/magnetometer.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', magnetometer.name().localName(), magnetometer)
material = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'material'), CTD_ANON_67,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd',
        9, 2))
Namespace.addCategoryObject('elementBinding', material.name().localName(), material)
mesh = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'mesh'), CTD_ANON_70,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 9,
        2))
Namespace.addCategoryObject('elementBinding', mesh.name().localName(), mesh)
plane = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'plane'), CTD_ANON_72,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plane_shape.xsd',
        9, 2))
Namespace.addCategoryObject('elementBinding', plane.name().localName(), plane)
plugin = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'plugin'), CTD_ANON_73,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 9,
        2))
Namespace.addCategoryObject('elementBinding', plugin.name().localName(), plugin)
polyline = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'polyline'), CTD_ANON_74,
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/polyline_shape.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', polyline.name().localName(), polyline)
projector = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'projector'), CTD_ANON_75,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd',
        5, 2))
Namespace.addCategoryObject('elementBinding', projector.name().localName(), projector)
ray = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ray'), CTD_ANON_76,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', ray.name().localName(), ray)
sensor = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sensor'), CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 23,
2))
Namespace.addCategoryObject('elementBinding', sensor.name().localName(), sensor)
sonar = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sonar'), CTD_ANON_83,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', sonar.name().localName(), sonar)
sphere = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sphere'), CTD_ANON_84,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sphere_shape.xsd',
9, 2))
Namespace.addCategoryObject('elementBinding', sphere.name().localName(), sphere)
surface = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'surface'), CTD_ANON_85,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 9,
2))
Namespace.addCategoryObject('elementBinding', surface.name().localName(), surface)
transceiver = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transceiver'), CTD_ANON_95,
location=pyxb.utils.utility.Location(
'http://sdformat.org/schemas/transceiver.xsd', 9, 2))
Namespace.addCategoryObject('elementBinding', transceiver.name().localName(), transceiver)
visual = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'visual'), CTD_ANON_96,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 12,
2))
Namespace.addCategoryObject('elementBinding', visual.name().localName(), visual)
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'static'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON,
documentation='\n If set to true, the model is immovable. Otherwise the model is simulated in the dynamics engine.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
17, 8)))
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'self_collide'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON,
documentation='\n If set to true, all links in the model will collide with each other (except those connected by a joint). Can be overridden by the link or collision element self_collide property. Two links within a model will collide if link1.self_collide OR link2.self_collide. Links connected by a joint will never collide.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
26, 8)))
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'allow_auto_disable'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON,
documentation='\n Allows a model to auto-disable, which is means the physics engine can skip updating the model when the model is at rest. This parameter is only used by models with no joints.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
35, 8)))
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON,
documentation='\n A position and orientation in the global coordinate frame for the model. Position(x,y,z) and rotation (roll, pitch yaw) in the global coordinate frame.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
44, 8)))
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'include'), CTD_ANON_, scope=CTD_ANON,
documentation='\n Include resources from a URI. This can be used to nest models.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
53, 8)))
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gripper'), CTD_ANON_28, scope=CTD_ANON,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 4, 2)))
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'joint'), CTD_ANON_48, scope=CTD_ANON,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 10, 2)))
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'link'), CTD_ANON_60, scope=CTD_ANON,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 16, 2)))
CTD_ANON._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'plugin'), CTD_ANON_73, scope=CTD_ANON,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 9, 2)))
def _BuildAutomaton():
    """Build the FAC content-model automaton for CTD_ANON.

    Nine element-use states are all initial.  The first five carry an
    occurrence counter (max=1 for static/self_collide/allow_auto_disable/pose,
    unbounded for include); the last four (link/joint/plugin/gripper) are
    freely repeatable with no counter.  Transition lists are generated here
    by loops, preserving the exact state/transition order of the original
    generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac

    schema = '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd'
    loc = pyxb.utils.utility.Location

    # (namespace, tag, counter-declaration line, element-use line, counter max)
    counted_specs = [
        (None, 'static', 16, 17, 1),
        (None, 'self_collide', 25, 26, 1),
        (None, 'allow_auto_disable', 34, 35, 1),
        (None, 'pose', 43, 44, 1),
        (None, 'include', 52, 53, None),  # include may repeat without bound
    ]
    # (namespace, tag, element-use line) — repeatable, no counter
    free_specs = [
        (Namespace, 'link', 101),
        (Namespace, 'joint', 102),
        (Namespace, 'plugin', 103),
        (Namespace, 'gripper', 104),
    ]

    counters = set()
    ccs = []
    for _ns, _tag, decl_line, _use_line, bound in counted_specs:
        cc = fac.CounterCondition(min=0, max=bound, metadata=loc(schema, decl_line, 8))
        counters.add(cc)
        ccs.append(cc)

    states = []
    for idx, (ns, tag, _decl_line, use_line, _bound) in enumerate(counted_specs):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(ns, tag)),
            loc(schema, use_line, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(ccs[idx], False)]),
                                is_unordered_catenation=False))
    for ns, tag, use_line in free_specs:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(ns, tag)),
            loc(schema, use_line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))

    # Counter-guarded states: the self-transition that increments the counter
    # precedes the counter-resetting transition to the same state; every other
    # destination just resets the counter.
    for idx, cc in enumerate(ccs):
        transitions = []
        for j, dst in enumerate(states):
            if j == idx:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        states[idx]._set_transitionSet(transitions)

    # Counter-free states transition everywhere with no update instructions.
    for src in states[len(ccs):]:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])

    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON._Automaton = _BuildAutomaton()
CTD_ANON_._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'uri'), pyxb.binding.datatypes.string, scope=CTD_ANON_,
documentation='\n URI to a resource, such as a model\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
62, 14)))
CTD_ANON_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_,
documentation='\n Override the pose of the included model. A position and orientation in the global coordinate frame for the model. Position(x,y,z) and rotation (roll, pitch yaw) in the global coordinate frame.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
71, 14)))
CTD_ANON_._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'name'), pyxb.binding.datatypes.string,
scope=CTD_ANON_,
documentation='\n Override the name of the included model.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
80, 14)))
CTD_ANON_._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'static'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON_,
documentation='\n Override the static value of the included model.\n ',
location=pyxb.utils.utility.Location(
'/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd',
89, 14)))
def _BuildAutomaton_():
    """Build the FAC content-model automaton for CTD_ANON_.

    Four element-use states, all initial: 'uri' carries no occurrence counter
    (its final_update is an empty set, and its transitions carry no update
    instructions), while 'pose', 'name' and 'static' each have a max=1
    counter.  The loop-built transition lists preserve the exact order of the
    original generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac

    schema = '/home/gchen/Dropbox/project/HBP-RD/BlenderRobotDesigner/resources/sdf_model.xsd'
    loc = pyxb.utils.utility.Location

    # (tag, counter-declaration line or None, element-use line)
    specs = [('uri', None, 62), ('pose', 70, 71), ('name', 79, 80), ('static', 88, 89)]

    counters = set()
    ccs = {}
    for tag, decl_line, _use_line in specs:
        if decl_line is not None:
            cc = fac.CounterCondition(min=0, max=1, metadata=loc(schema, decl_line, 14))
            counters.add(cc)
            ccs[tag] = cc

    states = []
    for tag, decl_line, use_line in specs:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            loc(schema, use_line, 14))
        final_update = set()
        if decl_line is not None:
            final_update.add(fac.UpdateInstruction(ccs[tag], False))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    # 'uri' has no counter: unconditional transitions to every state.
    states[0]._set_transitionSet([fac.Transition(dst, []) for dst in states])

    # Counted states: increment the counter on the self-loop (first), reset it
    # on every transition (including the second self-transition).
    for idx in range(1, len(states)):
        cc = ccs[specs[idx][0]]
        transitions = []
        for j, dst in enumerate(states):
            if j == idx:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        states[idx]._set_transitionSet(transitions)

    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_._Automaton = _BuildAutomaton_()
CTD_ANON_2._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'vertical_position'), CTD_ANON_3, scope=CTD_ANON_2,
documentation='\n \n Noise parameters for vertical position\n \n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 13,
8)))
CTD_ANON_2._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'vertical_velocity'), CTD_ANON_4, scope=CTD_ANON_2,
documentation='\n \n Noise parameters for vertical velocity\n \n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 28,
8)))
def _BuildAutomaton_2():
    """Build the FAC content-model automaton for CTD_ANON_2.

    Two symmetric optional (max=1, counted) element states, both initial.
    The loop-built transition lists preserve the exact order of the original
    generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/altimeter.xsd'
    loc = pyxb.utils.utility.Location

    # (tag, counter-declaration line, element-use line)
    specs = [('vertical_position', 12, 13), ('vertical_velocity', 27, 28)]

    counters = set()
    ccs = []
    states = []
    for tag, decl_line, use_line in specs:
        cc = fac.CounterCondition(min=0, max=1, metadata=loc(schema, decl_line, 8))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_2._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            loc(schema, use_line, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))

    # Self-loop increments the state's counter; every transition (including a
    # second self-transition) also appears with a counter reset.
    for idx, cc in enumerate(ccs):
        transitions = []
        for j, dst in enumerate(states):
            if j == idx:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        states[idx]._set_transitionSet(transitions)

    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the content-model automaton to CTD_ANON_2.
CTD_ANON_2._Automaton = _BuildAutomaton_2()
def _BuildAutomaton_3():
    """Build an empty FAC automaton for CTD_ANON_3 (no child elements)."""
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_3
    del _BuildAutomaton_3
    import pyxb.utils.fac as fac
    # No states and no counters; third argument (nullability flag) is False,
    # matching the original generated code.
    return fac.Automaton([], set(), False, containing_state=None)
# Attach the (empty) content-model automaton to CTD_ANON_3.
CTD_ANON_3._Automaton = _BuildAutomaton_3()
def _BuildAutomaton_4():
    """Build an empty FAC automaton for CTD_ANON_4 (no child elements)."""
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_4
    del _BuildAutomaton_4
    import pyxb.utils.fac as fac
    # No states and no counters; third argument (nullability flag) is False,
    # matching the original generated code.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_4._Automaton = _BuildAutomaton_4()
CTD_ANON_5._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'uri'), pyxb.binding.datatypes.string,
scope=CTD_ANON_5, documentation='\n URI of the audio media.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 13,
8)))
CTD_ANON_5._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pitch'), pyxb.binding.datatypes.double,
scope=CTD_ANON_5,
documentation='\n Pitch for the audio media, in Hz\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 22,
8)))
CTD_ANON_5._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'gain'), pyxb.binding.datatypes.double,
scope=CTD_ANON_5,
documentation='\n Gain for the audio media, in dB.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 31,
8)))
CTD_ANON_5._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'contact'), CTD_ANON_6, scope=CTD_ANON_5,
documentation='\n List of collision objects that will trigger audio playback.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 40,
8)))
CTD_ANON_5._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'loop'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON_5,
documentation='\n True to make the audio source loop playback.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 62,
8)))
CTD_ANON_5._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_5,
documentation='\n A position and orientation in the parent coordinate frame for the audio source. Position(x,y,z) and rotation (roll, pitch yaw) in the parent coordinate frame.\n ',
location=pyxb.utils.utility.Location(
'http://sdformat.org/schemas/audio_source.xsd', 71, 8)))
def _BuildAutomaton_5():
    """Build the content-model automaton for CTD_ANON_5 (<audio_source>).

    States correspond to the child elements uri, pitch, gain, contact,
    loop and pose; each of the latter five carries a 0..1 occurrence
    counter (CounterCondition with min=0, max=1).
    """
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_5
    del _BuildAutomaton_5
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/audio_source.xsd'

    def loc(line):
        return pyxb.utils.utility.Location(schema, line, 8)

    # One 0..1 occurrence counter per optional particle (schema lines below).
    ccs = [fac.CounterCondition(min=0, max=1, metadata=loc(line))
           for line in (21, 30, 39, 61, 70)]
    counters = set(ccs)

    # (tag, element schema line, governing counter or None) in declaration order.
    particles = (
        ('uri', 13, None),
        ('pitch', 22, ccs[0]),
        ('gain', 31, ccs[1]),
        ('contact', 40, ccs[2]),
        ('loop', 62, ccs[3]),
        ('pose', 71, ccs[4]),
    )

    states = []
    for tag, line, cc in particles:
        final_update = set()
        if cc is not None:
            final_update.add(fac.UpdateInstruction(cc, False))
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_5._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(line))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    # Transition table: from a counter-less state every state is reachable with
    # no updates; from a counted state every state is reachable with a
    # (counter, False) update, and the self-transition additionally gets a
    # (counter, True) variant first.
    for src, (_, _, cc) in zip(states, particles):
        transitions = []
        for dst in states:
            if cc is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if dst is src:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_5.
CTD_ANON_5._Automaton = _BuildAutomaton_5()
# Child-element declaration for CTD_ANON_6 (<contact> inside <audio_source>).
CTD_ANON_6._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'collision'), pyxb.binding.datatypes.string,
        scope=CTD_ANON_6,
        documentation='\n Name of child collision element that will trigger audio playback.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 49,
        14)))
def _BuildAutomaton_6():
    """Build the content model for CTD_ANON_6: one repeatable <collision> element."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_6
    del _BuildAutomaton_6
    import pyxb.utils.fac as fac

    use = CTD_ANON_6._UseForTag(pyxb.namespace.ExpandedName(None, 'collision'))
    where = pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 49, 14)
    only_state = fac.State(pyxb.binding.content.ElementUse(use, where),
                           is_initial=True, final_update=set(),
                           is_unordered_catenation=False)
    # The sole state loops back to itself with no counter updates.
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_6.
CTD_ANON_6._Automaton = _BuildAutomaton_6()
# Child-element declaration for CTD_ANON_7, generated from box_shape.xsd.
CTD_ANON_7._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'size'), vector3, scope=CTD_ANON_7,
    documentation='\n The three side lengths of the box. The origin of the box is in its geometric center (inside the center of the box).\n ',
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/box_shape.xsd', 13, 8)))
def _BuildAutomaton_7():
    """Build the content model for CTD_ANON_7: one repeatable <size> element."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_7
    del _BuildAutomaton_7
    import pyxb.utils.fac as fac

    symbol = pyxb.binding.content.ElementUse(
        CTD_ANON_7._UseForTag(pyxb.namespace.ExpandedName(None, 'size')),
        pyxb.utils.utility.Location('http://sdformat.org/schemas/box_shape.xsd', 13, 8))
    state = fac.State(symbol, is_initial=True, final_update=set(),
                      is_unordered_catenation=False)
    # Single state with an update-free self-loop.
    state._set_transitionSet([fac.Transition(state, [])])
    return fac.Automaton([state], set(), False, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_7.
CTD_ANON_7._Automaton = _BuildAutomaton_7()
# Child-element declarations for CTD_ANON_8, the anonymous complex type
# generated from camera.xsd for <camera>.
CTD_ANON_8._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_8,
    documentation='\n A position and orientation in the parent coordinate frame for the camera.\n ',
    location=pyxb.utils.utility.Location(
        'http://sdformat.org/schemas/camera.xsd', 13, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'horizontal_fov'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_8, documentation='\n Horizontal field of view\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 22, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'image'), CTD_ANON_9, scope=CTD_ANON_8,
        documentation='\n The image size in pixels and format.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 31, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'clip'), CTD_ANON_10, scope=CTD_ANON_8,
        documentation='\n The near and far clip planes. Objects closer or farther than these planes are not rendered.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 71, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'save'), CTD_ANON_11, scope=CTD_ANON_8,
        documentation='\n Enable or disable saving of camera frames.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 102, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'depth_camera'), CTD_ANON_12, scope=CTD_ANON_8,
        documentation='\n Depth camera parameters\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 131, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'noise'), CTD_ANON_13, scope=CTD_ANON_8,
        documentation='\n The properties of the noise model that should be applied to generated images\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 153, 8)))
CTD_ANON_8._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'distortion'), CTD_ANON_14, scope=CTD_ANON_8,
        documentation='\n Lens distortion to be applied to camera images. See http://en.wikipedia.org/wiki/Distortion_(optics)#Software_correction\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 193, 8)))
def _BuildAutomaton_8():
    """Build the content-model automaton for CTD_ANON_8 (<camera>).

    pose, save, depth_camera, noise and distortion each carry a 0..1
    occurrence counter (CounterCondition with min=0, max=1);
    horizontal_fov, image and clip have no counter.
    """
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_8
    del _BuildAutomaton_8
    import pyxb.utils.fac as fac

    def camera_loc(line):
        return pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', line, 8)

    # 0..1 occurrence counters for the optional particles, keyed by tag.
    cc = {tag: fac.CounterCondition(min=0, max=1, metadata=camera_loc(line))
          for tag, line in (('pose', 12), ('save', 101), ('depth_camera', 130),
                            ('noise', 152), ('distortion', 192))}
    counters = set(cc.values())

    # (tag, element schema line, governing counter or None) in declaration order.
    particles = (
        ('pose', 13, cc['pose']),
        ('horizontal_fov', 22, None),
        ('image', 31, None),
        ('clip', 71, None),
        ('save', 102, cc['save']),
        ('depth_camera', 131, cc['depth_camera']),
        ('noise', 153, cc['noise']),
        ('distortion', 193, cc['distortion']),
    )

    states = []
    for tag, line, counter in particles:
        final = set() if counter is None else {fac.UpdateInstruction(counter, False)}
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_8._UseForTag(pyxb.namespace.ExpandedName(None, tag)), camera_loc(line))
        states.append(fac.State(symbol, is_initial=True, final_update=final,
                                is_unordered_catenation=False))

    # From a counter-less state every state is reachable with no updates; from
    # a counted state every state is reachable with (counter, False), and the
    # self-transition additionally gets a (counter, True) variant first.
    for src, (_, _, counter) in zip(states, particles):
        transitions = []
        for dst in states:
            if counter is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if dst is src:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_8.
CTD_ANON_8._Automaton = _BuildAutomaton_8()
# Child-element declarations for CTD_ANON_9 (<image> inside <camera>).
CTD_ANON_9._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'width'), pyxb.binding.datatypes.int, scope=CTD_ANON_9,
        documentation='\n Width in pixels\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 40, 14)))
CTD_ANON_9._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'height'), pyxb.binding.datatypes.int,
        scope=CTD_ANON_9,
        documentation='\n Height in pixels \n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 49, 14)))
CTD_ANON_9._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'format'), pyxb.binding.datatypes.string,
        scope=CTD_ANON_9,
        documentation='\n (L8|R8G8B8|B8G8R8|BAYER_RGGB8|BAYER_BGGR8|BAYER_GBRG8|BAYER_GRBG8)\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 58, 14)))
def _BuildAutomaton_9():
    """Build the content model for CTD_ANON_9 (<image>): width, height,
    and a <format> element bounded by a 0..1 occurrence counter."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_9
    del _BuildAutomaton_9
    import pyxb.utils.fac as fac

    def camera_loc(line):
        return pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', line, 14)

    # 0..1 occurrence counter governing the <format> particle.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=camera_loc(57))
    counters = {cc_0}

    def element_state(tag, line, counter):
        final = set() if counter is None else {fac.UpdateInstruction(counter, False)}
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_9._UseForTag(pyxb.namespace.ExpandedName(None, tag)), camera_loc(line))
        return fac.State(symbol, is_initial=True, final_update=final,
                         is_unordered_catenation=False)

    particles = (('width', 40, None), ('height', 49, None), ('format', 58, cc_0))
    states = [element_state(*p) for p in particles]

    # width/height: unconditional transitions to every state.  format: every
    # transition carries (cc_0, False); the self-transition also gets a
    # (cc_0, True) variant first.
    for src, (_, _, counter) in zip(states, particles):
        transitions = []
        for dst in states:
            if counter is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if dst is src:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_9.
CTD_ANON_9._Automaton = _BuildAutomaton_9()
# Child-element declarations for CTD_ANON_10 (<clip> inside <camera>).
CTD_ANON_10._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'near'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_10,
        documentation='\n Near clipping plane\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 80, 14)))
CTD_ANON_10._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'far'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_10,
        documentation='\n Far clipping plane\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 89, 14)))
def _BuildAutomaton_10():
    """Build the content model for CTD_ANON_10 (<clip>): <near> and <far>,
    each reachable from the other with no counters."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_10
    del _BuildAutomaton_10
    import pyxb.utils.fac as fac

    def element_state(tag, line):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_10._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', line, 14))
        return fac.State(symbol, is_initial=True, final_update=set(),
                         is_unordered_catenation=False)

    states = [element_state('near', 80), element_state('far', 89)]
    # Every state may be followed by every state, with no counter updates.
    for st in states:
        st._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_10.
CTD_ANON_10._Automaton = _BuildAutomaton_10()
# Child-element declaration for CTD_ANON_11 (<save> inside <camera>).
CTD_ANON_11._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'path'), pyxb.binding.datatypes.string,
        scope=CTD_ANON_11,
        documentation='\n The path name which will hold the frame data. If path name is relative, then directory is relative to current working directory.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 111, 14)))
def _BuildAutomaton_11():
    """Build the content model for CTD_ANON_11: one repeatable <path> element."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_11
    del _BuildAutomaton_11
    import pyxb.utils.fac as fac

    use = CTD_ANON_11._UseForTag(pyxb.namespace.ExpandedName(None, 'path'))
    where = pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 111, 14)
    only_state = fac.State(pyxb.binding.content.ElementUse(use, where),
                           is_initial=True, final_update=set(),
                           is_unordered_catenation=False)
    # The sole state loops back to itself with no counter updates.
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_11.
CTD_ANON_11._Automaton = _BuildAutomaton_11()
# Child-element declaration for CTD_ANON_12 (<depth_camera> inside <camera>).
CTD_ANON_12._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'output'), pyxb.binding.datatypes.string,
        scope=CTD_ANON_12,
        documentation='\n Type of output\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 140, 14)))
def _BuildAutomaton_12():
    """Build the content model for CTD_ANON_12: one repeatable <output> element."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_12
    del _BuildAutomaton_12
    import pyxb.utils.fac as fac

    symbol = pyxb.binding.content.ElementUse(
        CTD_ANON_12._UseForTag(pyxb.namespace.ExpandedName(None, 'output')),
        pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 140, 14))
    state = fac.State(symbol, is_initial=True, final_update=set(),
                      is_unordered_catenation=False)
    # Single state with an update-free self-loop.
    state._set_transitionSet([fac.Transition(state, [])])
    return fac.Automaton([state], set(), False, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_12.
CTD_ANON_12._Automaton = _BuildAutomaton_12()
# Child-element declarations for CTD_ANON_13 (<noise> inside <camera>).
CTD_ANON_13._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'type'), pyxb.binding.datatypes.string,
        scope=CTD_ANON_13,
        documentation='\n The type of noise. Currently supported types are: "gaussian" (draw additive noise values independently for each pixel from a Gaussian distribution).\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 162, 14)))
CTD_ANON_13._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'mean'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_13,
        documentation='\n For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 171, 14)))
CTD_ANON_13._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'stddev'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_13,
        documentation='\n For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 180, 14)))
def _BuildAutomaton_13():
    """Build the content model for CTD_ANON_13 (<noise>): <type>, plus <mean>
    and <stddev> each bounded by a 0..1 occurrence counter."""
    # Self-deleting helper: invoked exactly once, right after its definition.
    global _BuildAutomaton_13
    del _BuildAutomaton_13
    import pyxb.utils.fac as fac

    def camera_loc(line):
        return pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', line, 14)

    # 0..1 occurrence counters for <mean> and <stddev>.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=camera_loc(170))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=camera_loc(179))
    counters = {cc_0, cc_1}

    def element_state(tag, line, counter):
        final = set() if counter is None else {fac.UpdateInstruction(counter, False)}
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_13._UseForTag(pyxb.namespace.ExpandedName(None, tag)), camera_loc(line))
        return fac.State(symbol, is_initial=True, final_update=final,
                         is_unordered_catenation=False)

    particles = (('type', 162, None), ('mean', 171, cc_0), ('stddev', 180, cc_1))
    states = [element_state(*p) for p in particles]

    # type: unconditional transitions to every state.  mean/stddev: every
    # transition carries (counter, False); the self-transition also gets a
    # (counter, True) variant first.
    for src, (_, _, counter) in zip(states, particles):
        transitions = []
        for dst in states:
            if counter is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if dst is src:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the generated content-model automaton on CTD_ANON_13.
CTD_ANON_13._Automaton = _BuildAutomaton_13()
# Child-element declarations for CTD_ANON_14 (<distortion> inside <camera>).
CTD_ANON_14._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'k1'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_14,
        documentation='\n The radial distortion coefficient k1\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 202, 14)))
CTD_ANON_14._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'k2'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_14,
        documentation='\n The radial distortion coefficient k2\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 211, 14)))
CTD_ANON_14._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'k3'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_14,
        documentation='\n The radial distortion coefficient k3\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 220, 14)))
CTD_ANON_14._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'p1'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_14,
        documentation='\n The tangential distortion coefficient p1\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 229, 14)))
CTD_ANON_14._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'p2'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_14,
        documentation='\n The tangential distortion coefficient p2\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 238, 14)))
CTD_ANON_14._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'center'), vector2d, scope=CTD_ANON_14,
        documentation='\n The distortion center or principal point\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 247, 14)))
def _BuildAutomaton_14():
    """Build the content-model automaton for CTD_ANON_14 (camera distortion).

    All six children are optional (0..1), so each gets its own counter,
    and every state may transition to every other state.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_14
    del _BuildAutomaton_14
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/camera.xsd'
    # (tag, schema line of the enclosing particle); the element declaration
    # itself sits on the following schema line.
    specs = [('k1', 201), ('k2', 210), ('k3', 219),
             ('p1', 228), ('p2', 237), ('center', 246)]
    counters = set()
    ccs = []
    states = []
    for tag, line in specs:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, line, 14))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_14._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line + 1, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # A state may repeat itself while its counter allows (True update) or
    # close its counter (False update) when moving to any state.
    for src, cc in zip(states, ccs):
        transitions = []
        for dst in states:
            if dst is src:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton built above to its complex type.
CTD_ANON_14._Automaton = _BuildAutomaton_14()

# Register the children of the <collision> element on CTD_ANON_15.
CTD_ANON_15._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'laser_retro'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_15,
    documentation='\n intensity value returned by laser sensor.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 15, 8)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'max_contacts'),
    pyxb.binding.datatypes.int,
    scope=CTD_ANON_15,
    documentation='\n Maximum number of contacts allowed between two entities. This value overrides the max_contacts element defined in physics.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 24, 8)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'pose'),
    pose,
    scope=CTD_ANON_15,
    documentation='\n The reference frame of the collision element, relative to the reference frame of the link.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 33, 8)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'geometry'),
    CTD_ANON_19,
    scope=CTD_ANON_15,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/geometry.xsd', 17, 2)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'surface'),
    CTD_ANON_85,
    scope=CTD_ANON_15,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 9, 2)))
def _BuildAutomaton_15():
    """Build the content-model automaton for CTD_ANON_15 (<collision>).

    laser_retro, max_contacts and pose are optional (counted 0..1);
    geometry and surface carry no counter and transition freely.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_15
    del _BuildAutomaton_15
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/collision.xsd'
    counters = set()
    states = []
    guards = []  # per-state counter; None for the uncounted elements
    # Optional children: each gets a 0..1 counter. The particle sits one
    # schema line above the element declaration itself.
    for tag, line in (('laser_retro', 14), ('max_contacts', 23), ('pose', 32)):
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, line, 8))
        counters.add(cc)
        guards.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line + 1, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Namespace-qualified children without counters.
    for tag, line in (('geometry', 41), ('surface', 42)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
        guards.append(None)
    # Counted states may repeat (True) or close their counter (False) on any
    # move; uncounted states transition everywhere with no counter updates.
    for src, cc in zip(states, guards):
        transitions = []
        for dst in states:
            if cc is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if dst is src:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton built above to its complex type.
CTD_ANON_15._Automaton = _BuildAutomaton_15()

# Register the children of the contact sensor element on CTD_ANON_16.
CTD_ANON_16._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'collision'),
    pyxb.binding.datatypes.string,
    scope=CTD_ANON_16,
    documentation='\n name of the collision element within a link that acts as the contact sensor.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/contact.xsd', 13, 8)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'topic'),
    pyxb.binding.datatypes.string,
    scope=CTD_ANON_16,
    documentation='\n Topic on which contact data is published.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/contact.xsd', 22, 8)))
def _BuildAutomaton_16():
    """Build the content-model automaton for CTD_ANON_16 (contact sensor).

    Two uncounted children (collision, topic) that may follow each other
    freely; the automaton is not nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_16
    del _BuildAutomaton_16
    import pyxb.utils.fac as fac

    counters = set()
    schema = 'http://sdformat.org/schemas/contact.xsd'
    states = []
    for tag, line in (('collision', 13), ('topic', 22)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    # Every state transitions to every state with no counter updates.
    for src in states:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the automaton built above to its complex type.
CTD_ANON_16._Automaton = _BuildAutomaton_16()

# Register the children of the cylinder shape element on CTD_ANON_17.
CTD_ANON_17._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'radius'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_17,
    documentation='\n Radius of the cylinder\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/cylinder_shape.xsd', 13, 8)))
CTD_ANON_17._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'length'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_17,
    documentation='\n Length of the cylinder\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/cylinder_shape.xsd', 22, 8)))
def _BuildAutomaton_17():
    """Build the content-model automaton for CTD_ANON_17 (cylinder shape).

    Two uncounted children (radius, length) with free ordering; the
    automaton is not nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_17
    del _BuildAutomaton_17
    import pyxb.utils.fac as fac

    counters = set()
    schema = 'http://sdformat.org/schemas/cylinder_shape.xsd'
    states = []
    for tag, line in (('radius', 13), ('length', 22)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_17._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    # Every state transitions to every state with no counter updates.
    for src in states:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the automaton built above to its complex type.
CTD_ANON_17._Automaton = _BuildAutomaton_17()

# Register the children of the force-torque sensor element on CTD_ANON_18.
CTD_ANON_18._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'frame'),
    pyxb.binding.datatypes.string,
    scope=CTD_ANON_18,
    documentation='\n \n Frame in which to report the wrench values. Currently supported frames are:\n "parent" report the wrench expressed in the orientation of the parent link frame,\n "child" report the wrench expressed in the orientation of the child link frame,\n "sensor" report the wrench expressed in the orientation of the joint sensor frame.\n Note that for each option the point with respect to which the \n torque component of the wrench is expressed is the joint origin.\n \n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/forcetorque.xsd', 13, 8)))
CTD_ANON_18._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'measure_direction'),
    pyxb.binding.datatypes.string,
    scope=CTD_ANON_18,
    documentation='\n \n Direction of the wrench measured by the sensor. The supported options are:\n "parent_to_child" if the measured wrench is the one applied by parent link on the child link,\n "child_to_parent" if the measured wrench is the one applied by the child link on the parent link.\n \n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/forcetorque.xsd', 29, 8)))
def _BuildAutomaton_18():
    """Build the content-model automaton for CTD_ANON_18 (force-torque).

    Both children (frame, measure_direction) are optional (0..1), so each
    gets a counter; the automaton is nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_18
    del _BuildAutomaton_18
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/forcetorque.xsd'
    counters = set()
    ccs = []
    states = []
    # The particle sits one schema line above the element declaration.
    for tag, line in (('frame', 12), ('measure_direction', 28)):
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, line, 8))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_18._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line + 1, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Self-transitions may increment the counter; all moves may close it.
    for src, cc in zip(states, ccs):
        transitions = []
        for dst in states:
            if dst is src:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton built above to its complex type.
CTD_ANON_18._Automaton = _BuildAutomaton_18()

# Register the shape alternatives of the <geometry> element on CTD_ANON_19.
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'box'), CTD_ANON_7, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/box_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'cylinder'), CTD_ANON_17, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/cylinder_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'empty'), CTD_ANON_20, scope=CTD_ANON_19,
    documentation='\n You can use the empty tag to make empty geometries.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/geometry.xsd', 21, 8)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'heightmap'), CTD_ANON_30, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'image'), CTD_ANON_33, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'mesh'), CTD_ANON_70, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'plane'), CTD_ANON_72, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plane_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'polyline'), CTD_ANON_74, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/polyline_shape.xsd', 9, 2)))
CTD_ANON_19._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(Namespace, 'sphere'), CTD_ANON_84, scope=CTD_ANON_19,
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sphere_shape.xsd', 9, 2)))
def _BuildAutomaton_19():
    """Build the content-model automaton for CTD_ANON_19 (<geometry>).

    'empty' is the single optional (0..1) counted alternative; the eight
    namespace-qualified shapes are uncounted and transition freely.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_19
    del _BuildAutomaton_19
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/geometry.xsd'
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location(schema, 20, 8))
    counters.add(cc_0)
    states = []

    def _state(ns, tag, line, final_update):
        # One automaton state per geometry alternative.
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_19._UseForTag(pyxb.namespace.ExpandedName(ns, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        st = fac.State(symbol, is_initial=True, final_update=final_update,
                       is_unordered_catenation=False)
        states.append(st)
        return st

    st_empty = _state(None, 'empty', 21, set([fac.UpdateInstruction(cc_0, False)]))
    for tag, line in (('box', 33), ('cylinder', 34), ('heightmap', 35),
                      ('image', 36), ('mesh', 37), ('plane', 38),
                      ('polyline', 39), ('sphere', 40)):
        _state(Namespace, tag, line, set())

    # 'empty' may repeat while its counter permits, or close the counter when
    # moving to any alternative; the shape states transition everywhere freely.
    transitions = [fac.Transition(st_empty, [fac.UpdateInstruction(cc_0, True)])]
    transitions.extend(fac.Transition(dst, [fac.UpdateInstruction(cc_0, False)])
                       for dst in states)
    st_empty._set_transitionSet(transitions)
    for src in states[1:]:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the content-model automaton for the <geometry> choice to its type.
CTD_ANON_19._Automaton = _BuildAutomaton_19()
def _BuildAutomaton_20():
    """Build the content-model automaton for CTD_ANON_20 (the 'empty' tag).

    No children at all: an automaton with no states and no counters, not
    nullable by counter bookkeeping (third argument False as generated).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_20
    del _BuildAutomaton_20
    import pyxb.utils.fac as fac
    return fac.Automaton([], set(), False, containing_state=None)
# Attach the automaton built above to its complex type.
CTD_ANON_20._Automaton = _BuildAutomaton_20()

# Register the children of the GPS sensor element on CTD_ANON_21.
CTD_ANON_21._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'position_sensing'),
    CTD_ANON_22,
    scope=CTD_ANON_21,
    documentation='\n \n Parameters related to GPS position measurement.\n \n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 13, 8)))
CTD_ANON_21._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'velocity_sensing'),
    CTD_ANON_25,
    scope=CTD_ANON_21,
    documentation='\n \n Parameters related to GPS position measurement.\n \n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 58, 8)))
def _BuildAutomaton_21():
    """Build the content-model automaton for CTD_ANON_21 (GPS sensor).

    Both children (position_sensing, velocity_sensing) are optional (0..1),
    so each gets a counter; the automaton is nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_21
    del _BuildAutomaton_21
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/gps.xsd'
    counters = set()
    ccs = []
    states = []
    # The particle sits one schema line above the element declaration.
    for tag, line in (('position_sensing', 12), ('velocity_sensing', 57)):
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, line, 8))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line + 1, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Self-transitions may increment the counter; all moves may close it.
    for src, cc in zip(states, ccs):
        transitions = []
        for dst in states:
            if dst is src:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_21._Automaton = _BuildAutomaton_21()
# Declare the <horizontal>/<vertical> noise children of the GPS
# position-sensing type (gps.xsd).
CTD_ANON_22._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'horizontal'), CTD_ANON_23, scope=CTD_ANON_22,
                               documentation='\n \n Noise parameters for horizontal position measurement, in units of meters.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 24, 14)))
CTD_ANON_22._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'vertical'), CTD_ANON_24, scope=CTD_ANON_22,
                               documentation='\n \n Noise parameters for vertical position measurement, in units of meters.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 39, 14)))
def _BuildAutomaton_22():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_22
    del _BuildAutomaton_22
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/gps.xsd'
    # (tag, counter-condition schema line, element-use schema line); both
    # children are optional (0..1) and order-free.
    element_specs = (('horizontal', 23, 24),
                     ('vertical', 38, 39))
    counters = set()
    states = []
    occurrence = []
    for tag, cc_line, use_line in element_specs:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, cc_line, 14))
        counters.add(cc)
        occurrence.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_22._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Self-loop increments the state's own counter; all other moves (and the
    # alternate self-move) close it.
    for j, st in enumerate(states):
        cc = occurrence[j]
        transitions = []
        for k, dst in enumerate(states):
            if k == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the compiled content-model automaton to its anonymous complex type.
CTD_ANON_22._Automaton = _BuildAutomaton_22()
def _BuildAutomaton_23():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_23
    del _BuildAutomaton_23
    import pyxb.utils.fac as fac

    # Empty content model: no element states, no occurrence counters.
    return fac.Automaton([], set(), False, containing_state=None)
# Attach the compiled content-model automaton to its anonymous complex type.
CTD_ANON_23._Automaton = _BuildAutomaton_23()
def _BuildAutomaton_24():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_24
    del _BuildAutomaton_24
    import pyxb.utils.fac as fac

    # Empty content model: no element states, no occurrence counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_24._Automaton = _BuildAutomaton_24()
# Declare the <horizontal>/<vertical> noise children of the GPS
# velocity-sensing type (gps.xsd).
CTD_ANON_25._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'horizontal'), CTD_ANON_26, scope=CTD_ANON_25,
                               documentation='\n \n Noise parameters for horizontal velocity measurement, in units of meters/second.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 69, 14)))
CTD_ANON_25._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'vertical'), CTD_ANON_27, scope=CTD_ANON_25,
                               documentation='\n \n Noise parameters for vertical velocity measurement, in units of meters/second.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 84, 14)))
def _BuildAutomaton_25():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_25
    del _BuildAutomaton_25
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/gps.xsd'
    # (tag, counter-condition schema line, element-use schema line); both
    # children are optional (0..1) and order-free.
    element_specs = (('horizontal', 68, 69),
                     ('vertical', 83, 84))
    counters = set()
    states = []
    occurrence = []
    for tag, cc_line, use_line in element_specs:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, cc_line, 14))
        counters.add(cc)
        occurrence.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_25._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Self-loop increments the state's own counter; all other moves (and the
    # alternate self-move) close it.
    for j, st in enumerate(states):
        cc = occurrence[j]
        transitions = []
        for k, dst in enumerate(states):
            if k == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the compiled content-model automaton to its anonymous complex type.
CTD_ANON_25._Automaton = _BuildAutomaton_25()
def _BuildAutomaton_26():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_26
    del _BuildAutomaton_26
    import pyxb.utils.fac as fac

    # Empty content model: no element states, no occurrence counters.
    return fac.Automaton([], set(), False, containing_state=None)
# Attach the compiled content-model automaton to its anonymous complex type.
CTD_ANON_26._Automaton = _BuildAutomaton_26()
def _BuildAutomaton_27():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_27
    del _BuildAutomaton_27
    import pyxb.utils.fac as fac

    # Empty content model: no element states, no occurrence counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_27._Automaton = _BuildAutomaton_27()
# Declare the gripper children (gripper.xsd): the <grasp_check> settings block
# plus the <gripper_link> and <palm_link> name strings.
CTD_ANON_28._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'grasp_check'), CTD_ANON_29, scope=CTD_ANON_28,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 8, 8)))
CTD_ANON_28._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'gripper_link'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_28,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 28, 8)))
CTD_ANON_28._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'palm_link'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_28,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 32, 8)))
def _BuildAutomaton_28():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_28
    del _BuildAutomaton_28
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/gripper.xsd'
    counters = set()
    # Only <grasp_check> carries an occurrence counter (0..1); the two link
    # name elements have no counter and an empty final_update set.
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location(schema, 7, 8))
    counters.add(cc_0)

    def build_state(tag, line, final_update):
        # One automaton state per child element use.
        sym = pyxb.binding.content.ElementUse(
            CTD_ANON_28._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        return fac.State(sym, is_initial=True, final_update=final_update,
                         is_unordered_catenation=False)

    st_0 = build_state('grasp_check', 8, set([fac.UpdateInstruction(cc_0, False)]))
    st_1 = build_state('gripper_link', 28, set())
    st_2 = build_state('palm_link', 32, set())
    states = [st_0, st_1, st_2]

    # Looping on st_0 increments its counter; every exit from st_0 closes it.
    st_0._set_transitionSet([
        fac.Transition(st_0, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_0, [fac.UpdateInstruction(cc_0, False)]),
        fac.Transition(st_1, [fac.UpdateInstruction(cc_0, False)]),
        fac.Transition(st_2, [fac.UpdateInstruction(cc_0, False)]),
    ])
    # st_1 and st_2 may step to any state with no counter updates.
    for st in (st_1, st_2):
        st._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_28._Automaton = _BuildAutomaton_28()
# Declare the <grasp_check> tuning children (gripper.xsd).
CTD_ANON_29._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'detach_steps'), pyxb.binding.datatypes.int,
                               scope=CTD_ANON_29,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 12, 14)))
CTD_ANON_29._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'attach_steps'), pyxb.binding.datatypes.int,
                               scope=CTD_ANON_29,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gripper.xsd', 16, 14)))
CTD_ANON_29._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min_contact_count'),
                                                   pyxb.binding.datatypes.unsignedInt, scope=CTD_ANON_29,
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/gripper.xsd', 20, 14)))
def _BuildAutomaton_29():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_29
    del _BuildAutomaton_29
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/gripper.xsd'
    # (tag, counter-condition schema line, element-use schema line); all
    # three children are optional (0..1) and order-free.
    element_specs = (('detach_steps', 11, 12),
                     ('attach_steps', 15, 16),
                     ('min_contact_count', 19, 20))
    counters = set()
    states = []
    occurrence = []
    for tag, cc_line, use_line in element_specs:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, cc_line, 14))
        counters.add(cc)
        occurrence.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_29._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Self-loop increments the state's own counter; all other moves (and the
    # alternate self-move) close it.
    for j, st in enumerate(states):
        cc = occurrence[j]
        transitions = []
        for k, dst in enumerate(states):
            if k == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_29._Automaton = _BuildAutomaton_29()
# Declare the heightmap-shape children (heightmap_shape.xsd): image <uri>,
# world <size>, <pos> offset, repeatable <texture> and <blend> layers, and the
# <use_terrain_paging> flag.
CTD_ANON_30._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'uri'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_30,
                               documentation='\n URI to a grayscale image file\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    13, 8)))
CTD_ANON_30._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'size'), vector3, scope=CTD_ANON_30,
                               documentation='\n The size of the heightmap in world units.\n When loading an image: "size" is used if present, otherwise defaults to 1x1x1.\n When loading a DEM: "size" is used if present, otherwise defaults to true size of DEM.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    22, 8)))
CTD_ANON_30._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pos'), vector3, scope=CTD_ANON_30,
                                                   documentation='\n A position offset.\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/heightmap_shape.xsd', 34, 8)))
CTD_ANON_30._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'texture'), CTD_ANON_31, scope=CTD_ANON_30,
                               documentation='\n The heightmap can contain multiple textures. The order of the texture matters. The first texture will appear at the lowest height, and the last texture at the highest height. Use blend to control the height thresholds and fade between textures.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    43, 8)))
CTD_ANON_30._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'blend'), CTD_ANON_32, scope=CTD_ANON_30,
                               documentation='\n The blend tag controls how two adjacent textures are mixed. The number of blend elements should equal one less than the number of textures.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    83, 8)))
CTD_ANON_30._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'use_terrain_paging'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_30,
                               documentation='\n Set if the rendering engine will use terrain paging\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    114, 8)))
def _BuildAutomaton_30():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_30
    del _BuildAutomaton_30
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/heightmap_shape.xsd'
    # (tag, (counter max, counter schema line) or None, element-use line).
    # <uri> has no counter and an empty final_update; <texture> and <blend>
    # are unbounded (max=None); the rest are 0..1.
    element_specs = (
        ('uri', None, 13),
        ('size', (1, 21), 22),
        ('pos', (1, 33), 34),
        ('texture', (None, 42), 43),
        ('blend', (None, 82), 83),
        ('use_terrain_paging', (1, 113), 114),
    )
    counters = set()
    states = []
    occurrence = []
    for tag, counter_spec, use_line in element_specs:
        final_update = set()
        cc = None
        if counter_spec is not None:
            cc_max, cc_line = counter_spec
            cc = fac.CounterCondition(min=0, max=cc_max,
                                      metadata=pyxb.utils.utility.Location(schema, cc_line, 8))
            counters.add(cc)
            final_update.add(fac.UpdateInstruction(cc, False))
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))
        occurrence.append(cc)
    # Counterless st_0 ('uri') reaches every state with no updates; each
    # counted state self-loops with its counter incremented and otherwise
    # moves with its counter closed.
    for j, st in enumerate(states):
        cc = occurrence[j]
        transitions = []
        for k, dst in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(dst, []))
                continue
            if k == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_30._Automaton = _BuildAutomaton_30()
# Declare the per-layer children of the heightmap <texture> type
# (heightmap_shape.xsd).
CTD_ANON_31._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'size'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_31,
                               documentation='\n Size of the applied texture in meters.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    52, 14)))
CTD_ANON_31._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'diffuse'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_31,
                               documentation='\n Diffuse texture image filename\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    61, 14)))
CTD_ANON_31._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'normal'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_31,
                               documentation='\n Normalmap texture image filename\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    70, 14)))
def _BuildAutomaton_31():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_31
    del _BuildAutomaton_31
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/heightmap_shape.xsd'
    # Three children with no occurrence counters; every state carries an
    # empty final_update set.
    states = []
    for tag, use_line in (('size', 52), ('diffuse', 61), ('normal', 70)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_31._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 14))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    # Every state may step to every state with no counter updates.
    for st in states:
        st._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_31._Automaton = _BuildAutomaton_31()
# Declare the children of the heightmap <blend> type (heightmap_shape.xsd).
CTD_ANON_32._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min_height'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_32,
                               documentation='\n Min height of a blend layer\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    92, 14)))
CTD_ANON_32._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'fade_dist'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_32,
                               documentation='\n Distance over which the blend occurs\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/heightmap_shape.xsd',
                                                                    101, 14)))
def _BuildAutomaton_32():
    # Self-removing one-shot helper: the builder deletes itself from the
    # module namespace as soon as it is invoked.
    global _BuildAutomaton_32
    del _BuildAutomaton_32
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/heightmap_shape.xsd'
    # Two children with no occurrence counters; every state carries an empty
    # final_update set.
    states = []
    for tag, use_line in (('min_height', 92), ('fade_dist', 101)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_32._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 14))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    # Every state may step to every state with no counter updates.
    for st in states:
        st._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_32._Automaton = _BuildAutomaton_32()
# Declare the image-shape children (image_shape.xsd): image <uri>, <scale>,
# grayscale <threshold>, extrusion <height> and model <granularity>.
CTD_ANON_33._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'uri'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_33,
                               documentation='\n URI of the grayscale image file\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 13,
                                                                    8)))
CTD_ANON_33._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'scale'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_33,
                               documentation='\n Scaling factor applied to the image\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 22,
                                                                    8)))
CTD_ANON_33._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'threshold'), pyxb.binding.datatypes.int,
                               scope=CTD_ANON_33, documentation='\n Grayscale threshold\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 31,
                                                                    8)))
CTD_ANON_33._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'height'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_33,
                               documentation='\n Height of the extruded boxes\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 40,
                                                                    8)))
CTD_ANON_33._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'granularity'), pyxb.binding.datatypes.int,
                               scope=CTD_ANON_33,
                               documentation='\n The amount of error in the model\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/image_shape.xsd', 49,
                                                                    8)))
def _BuildAutomaton_33():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_33
    del _BuildAutomaton_33
    import pyxb.utils.fac as fac
    # One automaton state per child element of CTD_ANON_33.  Every state is
    # initial, there are no occurrence counters, and every state may move to
    # every state (itself included) with an empty update-instruction list.
    xsd = 'http://sdformat.org/schemas/image_shape.xsd'
    elements = [('uri', 13), ('scale', 22), ('threshold', 31),
                ('height', 40), ('granularity', 49)]
    states = []
    for tag, line in elements:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_33._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    for state in states:
        state._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_33._Automaton = _BuildAutomaton_33()
# Register the child element declarations of the imu complex type
# (CTD_ANON_34); locations point back into the source XSD.
CTD_ANON_34._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'topic'),
    pyxb.binding.datatypes.string,
    scope=CTD_ANON_34,
    documentation='\n Topic on which data is published.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 13, 8)))
CTD_ANON_34._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'angular_velocity'),
    CTD_ANON_35,
    scope=CTD_ANON_34,
    documentation='\n These elements are specific to body-frame angular velocity,\n which is expressed in radians per second\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 22, 8)))
CTD_ANON_34._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'linear_acceleration'),
    CTD_ANON_39,
    scope=CTD_ANON_34,
    documentation='\n These elements are specific to body-frame linear acceleration,\n which is expressed in meters per second squared\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 75, 8)))
CTD_ANON_34._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'noise'),
    CTD_ANON_43,
    scope=CTD_ANON_34,
    documentation='\n The properties of the noise model that should be applied to generated data\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 128, 8)))
def _BuildAutomaton_34():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_34
    del _BuildAutomaton_34
    import pyxb.utils.fac as fac
    xsd = 'http://sdformat.org/schemas/imu.xsd'
    # (tag, element-declaration line, particle/counter line): each child of
    # CTD_ANON_34 is optional (0..1), so each gets its own counter condition.
    elements = [('topic', 13, 12), ('angular_velocity', 22, 21),
                ('linear_acceleration', 75, 74), ('noise', 128, 127)]
    counters = set()
    states = []
    state_counters = []
    for tag, el_line, cc_line in elements:
        cc = fac.CounterCondition(
            min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, cc_line, 8))
        counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_34._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Transition set for state i, in state order: the self-target carries an
    # UpdateInstruction(cc_i, True) immediately before the corresponding
    # UpdateInstruction(cc_i, False) transition; every target (self included)
    # also gets a single UpdateInstruction(cc_i, False) transition.
    for i, (state, cc) in enumerate(zip(states, state_counters)):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(
                    fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(
                fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_34._Automaton = _BuildAutomaton_34()
# Register the angular-velocity axis elements of CTD_ANON_35.
CTD_ANON_35._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'x'),
    CTD_ANON_36,
    scope=CTD_ANON_35,
    documentation='\n Angular velocity about the X axis\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 32, 14)))
CTD_ANON_35._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'y'),
    CTD_ANON_37,
    scope=CTD_ANON_35,
    documentation='\n Angular velocity about the Y axis\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 45, 14)))
CTD_ANON_35._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'z'),
    CTD_ANON_38,
    scope=CTD_ANON_35,
    documentation='\n Angular velocity about the Z axis\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 58, 14)))
def _BuildAutomaton_35():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_35
    del _BuildAutomaton_35
    import pyxb.utils.fac as fac
    xsd = 'http://sdformat.org/schemas/imu.xsd'
    # (tag, element-declaration line, particle/counter line): each axis child
    # of CTD_ANON_35 is optional (0..1), with its own counter condition.
    elements = [('x', 32, 31), ('y', 45, 44), ('z', 58, 57)]
    counters = set()
    states = []
    state_counters = []
    for tag, el_line, cc_line in elements:
        cc = fac.CounterCondition(
            min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, cc_line, 14))
        counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_35._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Transition set for state i, in state order: the self-target carries an
    # UpdateInstruction(cc_i, True) immediately before the corresponding
    # UpdateInstruction(cc_i, False) transition; every target (self included)
    # also gets a single UpdateInstruction(cc_i, False) transition.
    for i, (state, cc) in enumerate(zip(states, state_counters)):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(
                    fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(
                fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_35._Automaton = _BuildAutomaton_35()
def _BuildAutomaton_36():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_36
    del _BuildAutomaton_36
    import pyxb.utils.fac as fac
    # Content model with no child elements: no states, no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_36._Automaton = _BuildAutomaton_36()
def _BuildAutomaton_37():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_37
    del _BuildAutomaton_37
    import pyxb.utils.fac as fac
    # Content model with no child elements: no states, no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_37._Automaton = _BuildAutomaton_37()
def _BuildAutomaton_38():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_38
    del _BuildAutomaton_38
    import pyxb.utils.fac as fac
    # Content model with no child elements: no states, no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_38._Automaton = _BuildAutomaton_38()
# Register the linear-acceleration axis elements of CTD_ANON_39.
CTD_ANON_39._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'x'),
    CTD_ANON_40,
    scope=CTD_ANON_39,
    documentation='\n Linear acceleration about the X axis\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 85, 14)))
CTD_ANON_39._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'y'),
    CTD_ANON_41,
    scope=CTD_ANON_39,
    documentation='\n Linear acceleration about the Y axis\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 98, 14)))
CTD_ANON_39._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'z'),
    CTD_ANON_42,
    scope=CTD_ANON_39,
    documentation='\n Linear acceleration about the Z axis\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 111, 14)))
def _BuildAutomaton_39():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_39
    del _BuildAutomaton_39
    import pyxb.utils.fac as fac
    xsd = 'http://sdformat.org/schemas/imu.xsd'
    # (tag, element-declaration line, particle/counter line): each axis child
    # of CTD_ANON_39 is optional (0..1), with its own counter condition.
    elements = [('x', 85, 84), ('y', 98, 97), ('z', 111, 110)]
    counters = set()
    states = []
    state_counters = []
    for tag, el_line, cc_line in elements:
        cc = fac.CounterCondition(
            min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, cc_line, 14))
        counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_39._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Transition set for state i, in state order: the self-target carries an
    # UpdateInstruction(cc_i, True) immediately before the corresponding
    # UpdateInstruction(cc_i, False) transition; every target (self included)
    # also gets a single UpdateInstruction(cc_i, False) transition.
    for i, (state, cc) in enumerate(zip(states, state_counters)):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(
                    fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(
                fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_39._Automaton = _BuildAutomaton_39()
def _BuildAutomaton_40():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_40
    del _BuildAutomaton_40
    import pyxb.utils.fac as fac
    # Content model with no child elements: no states, no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_40._Automaton = _BuildAutomaton_40()
def _BuildAutomaton_41():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_41
    del _BuildAutomaton_41
    import pyxb.utils.fac as fac
    # Content model with no child elements: no states, no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_41._Automaton = _BuildAutomaton_41()
def _BuildAutomaton_42():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_42
    del _BuildAutomaton_42
    import pyxb.utils.fac as fac
    # Content model with no child elements: no states, no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_42._Automaton = _BuildAutomaton_42()
# Register the noise-model child elements of CTD_ANON_43.
CTD_ANON_43._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'type'),
    pyxb.binding.datatypes.string,
    scope=CTD_ANON_43,
    documentation='\n The type of noise. Currently supported types are: "gaussian" (draw noise values independently for each beam from a Gaussian distribution).\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 137, 14)))
CTD_ANON_43._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'rate'),
    CTD_ANON_44,
    scope=CTD_ANON_43,
    documentation='\n Noise parameters for angular rates.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 146, 14)))
CTD_ANON_43._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'accel'),
    CTD_ANON_45,
    scope=CTD_ANON_43,
    documentation='\n Noise parameters for linear accelerations.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 195, 14)))
def _BuildAutomaton_43():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_43
    del _BuildAutomaton_43
    import pyxb.utils.fac as fac
    # One automaton state per child element of CTD_ANON_43.  Every state is
    # initial, there are no occurrence counters, and every state may move to
    # every state (itself included) with an empty update-instruction list.
    xsd = 'http://sdformat.org/schemas/imu.xsd'
    elements = [('type', 137), ('rate', 146), ('accel', 195)]
    states = []
    for tag, line in elements:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_43._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 14))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    for state in states:
        state._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_43._Automaton = _BuildAutomaton_43()
# Register the Gaussian rate-noise parameter elements of CTD_ANON_44.
CTD_ANON_44._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'mean'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_44,
    documentation='\n For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 155, 20)))
CTD_ANON_44._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'stddev'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_44,
    documentation='\n For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 164, 20)))
CTD_ANON_44._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'bias_mean'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_44,
    documentation='\n For type "gaussian," the mean of the Gaussian distribution from which bias values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 173, 20)))
CTD_ANON_44._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'bias_stddev'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_44,
    documentation='\n For type "gaussian," the standard deviation of the Gaussian distribution from which bias values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 182, 20)))
def _BuildAutomaton_44():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_44
    del _BuildAutomaton_44
    import pyxb.utils.fac as fac
    xsd = 'http://sdformat.org/schemas/imu.xsd'
    # (tag, element-declaration line, particle/counter line): each noise
    # parameter of CTD_ANON_44 is optional (0..1), with its own counter.
    elements = [('mean', 155, 154), ('stddev', 164, 163),
                ('bias_mean', 173, 172), ('bias_stddev', 182, 181)]
    counters = set()
    states = []
    state_counters = []
    for tag, el_line, cc_line in elements:
        cc = fac.CounterCondition(
            min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, cc_line, 20))
        counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_44._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 20))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Transition set for state i, in state order: the self-target carries an
    # UpdateInstruction(cc_i, True) immediately before the corresponding
    # UpdateInstruction(cc_i, False) transition; every target (self included)
    # also gets a single UpdateInstruction(cc_i, False) transition.
    for i, (state, cc) in enumerate(zip(states, state_counters)):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(
                    fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(
                fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_44._Automaton = _BuildAutomaton_44()
# Register the Gaussian acceleration-noise parameter elements of CTD_ANON_45.
CTD_ANON_45._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'mean'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_45,
    documentation='\n For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 204, 20)))
CTD_ANON_45._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'stddev'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_45,
    documentation='\n For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 213, 20)))
CTD_ANON_45._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'bias_mean'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_45,
    documentation='\n For type "gaussian," the mean of the Gaussian distribution from which bias values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 222, 20)))
CTD_ANON_45._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'bias_stddev'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_45,
    documentation='\n For type "gaussian," the standard deviation of the Gaussian distribution from which bias values are drawn.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 231, 20)))
def _BuildAutomaton_45():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_45
    del _BuildAutomaton_45
    import pyxb.utils.fac as fac
    xsd = 'http://sdformat.org/schemas/imu.xsd'
    # (tag, element-declaration line, particle/counter line): each noise
    # parameter of CTD_ANON_45 is optional (0..1), with its own counter.
    elements = [('mean', 204, 203), ('stddev', 213, 212),
                ('bias_mean', 222, 221), ('bias_stddev', 231, 230)]
    counters = set()
    states = []
    state_counters = []
    for tag, el_line, cc_line in elements:
        cc = fac.CounterCondition(
            min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, cc_line, 20))
        counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_45._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 20))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Transition set for state i, in state order: the self-target carries an
    # UpdateInstruction(cc_i, True) immediately before the corresponding
    # UpdateInstruction(cc_i, False) transition; every target (self included)
    # also gets a single UpdateInstruction(cc_i, False) transition.
    for i, (state, cc) in enumerate(zip(states, state_counters)):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(
                    fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(
                fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_45._Automaton = _BuildAutomaton_45()
# Register the child element declarations of the inertial complex type
# (CTD_ANON_46).
CTD_ANON_46._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'mass'),
    pyxb.binding.datatypes.double,
    scope=CTD_ANON_46,
    documentation='\n The mass of the link.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 13, 8)))
CTD_ANON_46._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'pose'),
    pose,
    scope=CTD_ANON_46,
    documentation='\n This is the pose of the inertial reference frame, relative to the link reference frame. The origin of the inertial reference frame needs to be at the center of gravity. The axes of the inertial reference frame do not need to be aligned with the principal axes of the inertia.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 22, 8)))
CTD_ANON_46._AddElement(pyxb.binding.basis.element(
    pyxb.namespace.ExpandedName(None, 'inertia'),
    CTD_ANON_47,
    scope=CTD_ANON_46,
    documentation='\n The 3x3 rotational inertia matrix. Because the rotational inertia matrix is symmetric, only 6 above-diagonal elements of this matrix are specified here, using the attributes ixx, ixy, ixz, iyy, iyz, izz.\n ',
    location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 31, 8)))
def _BuildAutomaton_46():
    """Build the content-model automaton for CTD_ANON_46 (<inertial>).

    All three child elements (mass, pose, inertia) are optional and may
    appear in any order, each at most once; one occurrence counter per
    element enforces the max-one constraint.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_46
    del _BuildAutomaton_46
    import pyxb.utils.fac as fac

    _loc = pyxb.utils.utility.Location
    _xsd = 'http://sdformat.org/schemas/inertial.xsd'

    # One min=0/max=1 counter per optional child, keyed by its schema line.
    ccs = [fac.CounterCondition(min=0, max=1, metadata=_loc(_xsd, line, 8))
           for line in (12, 21, 30)]
    counters = set(ccs)

    # (element tag, schema line) in declaration order; ccs[i] pairs with specs[i].
    specs = (('mass', 13), ('pose', 22), ('inertia', 31))
    states = []
    for (tag, line), cc in zip(specs, ccs):
        use = CTD_ANON_46._UseForTag(pyxb.namespace.ExpandedName(None, tag))
        sym = pyxb.binding.content.ElementUse(use, _loc(_xsd, line, 8))
        # Terminating in this state closes the element's counter.
        states.append(fac.State(sym, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))

    # Every state reaches every state.  The self-transition first tries to
    # increment its own counter (repeat attempt, bounded by max=1), then
    # falls back to closing it; cross-transitions always close the counter.
    for i, (src, cc) in enumerate(zip(states, ccs)):
        transitions = []
        for j, dst in enumerate(states):
            if j == i:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_46._Automaton = _BuildAutomaton_46()
# --- Child-element declarations for CTD_ANON_47 (the <inertia> matrix). -----
# Registers the six above-diagonal components of the symmetric 3x3 rotational
# inertia matrix, each a plain xsd:double.  PyXB-generated code: prefer
# regeneration to hand edits.
CTD_ANON_47._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'ixx'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_47,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 40,
                                                                   14)))
CTD_ANON_47._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'ixy'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_47,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 44,
                                                                   14)))
CTD_ANON_47._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'ixz'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_47,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 48,
                                                                   14)))
CTD_ANON_47._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'iyy'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_47,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 52,
                                                                   14)))
CTD_ANON_47._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'iyz'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_47,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 56,
                                                                   14)))
CTD_ANON_47._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'izz'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_47,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 60,
                                                                   14)))
def _BuildAutomaton_47():
    """Build the content-model automaton for CTD_ANON_47 (<inertia>).

    The six inertia-matrix components may appear in any order; there are
    no occurrence counters, and every state transitions unconditionally
    to every state (including itself).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_47
    del _BuildAutomaton_47
    import pyxb.utils.fac as fac

    _loc = pyxb.utils.utility.Location
    _xsd = 'http://sdformat.org/schemas/inertial.xsd'

    # No min/max occurrence counters for this model group.
    counters = set()

    # (element tag, schema line) for each above-diagonal matrix component.
    specs = (('ixx', 40), ('ixy', 44), ('ixz', 48),
             ('iyy', 52), ('iyz', 56), ('izz', 60))
    states = []
    for tag, line in specs:
        use = CTD_ANON_47._UseForTag(pyxb.namespace.ExpandedName(None, tag))
        sym = pyxb.binding.content.ElementUse(use, _loc(_xsd, line, 14))
        states.append(fac.State(sym, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))

    # All-to-all transition table with no update instructions.
    for src in states:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_47._Automaton = _BuildAutomaton_47()
# --- Child-element declarations for CTD_ANON_48 (the <joint> type). ---------
# Registers parent/child link references, the joint pose, gearbox and screw
# parameters, both axes, per-engine physics, and the namespaced <sensor>
# element.  documentation= strings are runtime data copied verbatim from the
# XSD.  PyXB-generated code: prefer regeneration to hand edits.
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'parent'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_48, documentation='\n Name of the parent link\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 14, 8)))
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'child'), pyxb.binding.datatypes.string,
                               scope=CTD_ANON_48, documentation='\n Name of the child link\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 23, 8)))
CTD_ANON_48._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_48,
                                                   documentation='\n Pose offset from child link frame to joint frame (expressed in child link frame).\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/joint.xsd', 32, 8)))
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'gearbox_ratio'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_48,
                               documentation='\n Parameter for gearbox joints. Given theta_1 and theta_2 defined in description for gearbox_reference_body, theta_2 = -gearbox_ratio * theta_1.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 41, 8)))
CTD_ANON_48._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'gearbox_reference_body'),
                                                   pyxb.binding.datatypes.string, scope=CTD_ANON_48,
                                                   documentation='\n Parameter for gearbox joints. Gearbox ratio is enforced over two joint angles. First joint angle (theta_1) is the angle from the gearbox_reference_body to the parent link in the direction of the axis element and the second joint angle (theta_2) is the angle from the gearbox_reference_body to the child link in the direction of the axis2 element.\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/joint.xsd', 50, 8)))
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'thread_pitch'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_48,
                               documentation='\n Parameter for screw joints.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 59, 8)))
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'axis'), CTD_ANON_49, scope=CTD_ANON_48,
                               documentation='\n \n Parameters related to the axis of rotation for revolute joints,\n the axis of translation for prismatic joints.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 68, 8)))
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'axis2'), CTD_ANON_52, scope=CTD_ANON_48,
                               documentation='\n \n Parameters related to the second axis of rotation for revolute2 joints and universal joints.\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 226, 8)))
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'physics'), CTD_ANON_55, scope=CTD_ANON_48,
                               documentation='\n Parameters that are specific to a certain physics engine.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 378, 8)))
# NOTE(review): the <sensor> element is declared with a sensor.xsd location
# here, while the automaton below cites joint.xsd line 567 for the same use —
# both values are generator output, kept as-is.
CTD_ANON_48._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sensor'), CTD_ANON_82, scope=CTD_ANON_48,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 23, 2)))
def _BuildAutomaton_48():
    """Build the content-model automaton for CTD_ANON_48 (<joint>).

    parent, child and the namespaced sensor element carry no occurrence
    counter; the seven optional children (pose, gearbox_ratio,
    gearbox_reference_body, thread_pitch, axis, axis2, physics) each get a
    min=0/max=1 counter.  All states are initial and mutually reachable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_48
    del _BuildAutomaton_48
    import pyxb.utils.fac as fac

    _loc = pyxb.utils.utility.Location
    _xsd = 'http://sdformat.org/schemas/joint.xsd'

    # One min=0/max=1 counter per optional child element, by schema line.
    ccs = [fac.CounterCondition(min=0, max=1, metadata=_loc(_xsd, line, 8))
           for line in (31, 40, 49, 58, 67, 225, 377)]
    counters = set(ccs)

    # (tag namespace, tag, schema line, counter-or-None) in declaration order.
    specs = (
        (None, 'parent', 14, None),
        (None, 'child', 23, None),
        (None, 'pose', 32, ccs[0]),
        (None, 'gearbox_ratio', 41, ccs[1]),
        (None, 'gearbox_reference_body', 50, ccs[2]),
        (None, 'thread_pitch', 59, ccs[3]),
        (None, 'axis', 68, ccs[4]),
        (None, 'axis2', 226, ccs[5]),
        (None, 'physics', 378, ccs[6]),
        (Namespace, 'sensor', 567, None),
    )
    states = []
    for ns, tag, line, cc in specs:
        use = CTD_ANON_48._UseForTag(pyxb.namespace.ExpandedName(ns, tag))
        sym = pyxb.binding.content.ElementUse(use, _loc(_xsd, line, 8))
        # A counted state closes its counter when the automaton ends there;
        # uncounted states finish with no update.
        final_update = set()
        if cc is not None:
            final_update.add(fac.UpdateInstruction(cc, False))
        states.append(fac.State(sym, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    # Uncounted states transition to every state with no update instructions.
    # A counted state emits, for each destination in order, a close-counter
    # transition — preceded, for its self-loop only, by an increment-counter
    # transition that permits the (bounded) repeat attempt.
    for i, (src, (_, _, _, cc)) in enumerate(zip(states, specs)):
        transitions = []
        for j, dst in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if j == i:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_48._Automaton = _BuildAutomaton_48()
# --- Child-element declarations for CTD_ANON_49 (the joint <axis> type). ----
# Registers the axis direction vector, the parent-model-frame flag, and the
# nested dynamics/limit groups.  documentation= strings are runtime data
# copied verbatim from the XSD.  PyXB-generated code: prefer regeneration to
# hand edits.
CTD_ANON_49._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'xyz'), vector3, scope=CTD_ANON_49,
                                                   documentation='\n \n Represents the x,y,z components of the axis unit vector. The axis is\n expressed in the joint frame unless the use_parent_model_frame\n flag is set to true. The vector should be normalized.\n \n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/joint.xsd', 80, 14)))
CTD_ANON_49._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'use_parent_model_frame'),
                                                   pyxb.binding.datatypes.boolean, scope=CTD_ANON_49,
                                                   documentation='\n \n Flag to interpret the axis xyz element in the parent model frame instead\n of joint frame. Provided for Gazebo compatibility\n (see https://bitbucket.org/osrf/gazebo/issue/494 ).\n \n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/joint.xsd', 93, 14)))
CTD_ANON_49._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'dynamics'), CTD_ANON_50, scope=CTD_ANON_49,
                               documentation='\n An element specifying physical properties of the joint. These values are used to specify modeling properties of the joint, particularly useful for simulation.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 106, 14)))
CTD_ANON_49._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'limit'), CTD_ANON_51, scope=CTD_ANON_49,
                               documentation='\n specifies the limits of this joint\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 155, 14)))
def _BuildAutomaton_49():
    """Build the content-model automaton for CTD_ANON_49 (joint <axis>).

    xyz, use_parent_model_frame and limit carry no occurrence counter;
    only <dynamics> is optional with a min=0/max=1 counter.  All states
    are initial and mutually reachable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_49
    del _BuildAutomaton_49
    import pyxb.utils.fac as fac

    _loc = pyxb.utils.utility.Location
    _xsd = 'http://sdformat.org/schemas/joint.xsd'

    # Single occurrence counter, used by the <dynamics> state only.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(_xsd, 105, 14))
    counters = set([cc_0])

    # (element tag, schema line, counter-or-None) in declaration order.
    specs = (('xyz', 80, None),
             ('use_parent_model_frame', 93, None),
             ('dynamics', 106, cc_0),
             ('limit', 155, None))
    states = []
    for tag, line, cc in specs:
        use = CTD_ANON_49._UseForTag(pyxb.namespace.ExpandedName(None, tag))
        sym = pyxb.binding.content.ElementUse(use, _loc(_xsd, line, 14))
        # The counted state closes its counter on termination.
        final_update = set()
        if cc is not None:
            final_update.add(fac.UpdateInstruction(cc, False))
        states.append(fac.State(sym, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    # Uncounted states transition freely; the counted state's self-loop
    # tries an increment before the fallback close, as in the siblings.
    for i, (src, (_, _, cc)) in enumerate(zip(states, specs)):
        transitions = []
        for j, dst in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(dst, []))
            else:
                if j == i:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_49._Automaton = _BuildAutomaton_49()
# --- Child-element declarations for CTD_ANON_50 (axis <dynamics>). ----------
# Registers the damping, friction and spring parameters of a joint axis,
# all plain xsd:double values.  documentation= strings are runtime data
# copied verbatim from the XSD.  PyXB-generated code: prefer regeneration
# to hand edits.
CTD_ANON_50._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'damping'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_50,
                               documentation='\n The physical velocity dependent viscous damping coefficient of the joint.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 115, 20)))
CTD_ANON_50._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'friction'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_50,
                               documentation='\n The physical static friction value of the joint.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 124, 20)))
CTD_ANON_50._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'spring_reference'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_50,
                               documentation='\n The spring reference position for this joint axis.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 133, 20)))
CTD_ANON_50._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'spring_stiffness'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_50,
                               documentation='\n The spring stiffness for this joint axis.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 142, 20)))
def _BuildAutomaton_50():
    """Build the FAC content-model automaton for ``CTD_ANON_50``.

    The automaton accepts the children registered on ``CTD_ANON_50``
    (damping, friction, spring_reference, spring_stiffness); the first two
    carry 0..1 occurrence counters.  The helper deletes itself from the
    module namespace so it is invoked exactly once, immediately below.
    """
    global _BuildAutomaton_50
    del _BuildAutomaton_50
    import pyxb.utils.fac as fac

    _SCHEMA = 'http://sdformat.org/schemas/joint.xsd'
    _loc = pyxb.utils.utility.Location

    # Occurrence counters for the two optional (0..1) particles.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 114, 20))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 123, 20))
    counters = {cc_0, cc_1}

    def _use(tag, line):
        # ElementUse symbol for a child element of CTD_ANON_50.
        return pyxb.binding.content.ElementUse(
            CTD_ANON_50._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            _loc(_SCHEMA, line, 20))

    st_0 = fac.State(_use('damping', 115), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_0, False)},
                     is_unordered_catenation=False)
    st_1 = fac.State(_use('friction', 124), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_1, False)},
                     is_unordered_catenation=False)
    st_2 = fac.State(_use('spring_reference', 133), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    st_3 = fac.State(_use('spring_stiffness', 142), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    states = [st_0, st_1, st_2, st_3]

    def _counted(counter, origin):
        # Transitions leaving ``origin``: the particle may repeat (counter
        # incremented) or close (counter finalized) before any successor,
        # in the same order the pyxb generator emits them.
        out = []
        for dst in states:
            if dst is origin:
                out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
            out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        return out

    st_0._set_transitionSet(_counted(cc_0, st_0))
    st_1._set_transitionSet(_counted(cc_1, st_1))
    # The remaining states impose no counter updates on their successors.
    st_2._set_transitionSet([fac.Transition(dst, []) for dst in states])
    st_3._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_50._Automaton = _BuildAutomaton_50()

# Register the child elements of CTD_ANON_51: six double-valued joint-limit
# parameters.  Tuples are (tag, schema documentation, schema line); all live
# at column 20 of joint.xsd and share the xsd:double type.
for _tag, _doc, _line in (
        ('lower',
         '\n An attribute specifying the lower joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ',
         164),
        ('upper',
         '\n An attribute specifying the upper joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ',
         173),
        ('effort',
         '\n An attribute for enforcing the maximum joint effort applied by Joint::SetForce. Limit is not enforced if value is negative.\n ',
         182),
        ('velocity',
         '\n (not implemented) An attribute for enforcing the maximum joint velocity.\n ',
         191),
        ('stiffness',
         '\n Joint stop stiffness. Support physics engines: SimBody.\n ',
         200),
        ('dissipation',
         '\n Joint stop dissipation.\n ',
         209),
):
    CTD_ANON_51._AddElement(
        pyxb.binding.basis.element(
            pyxb.namespace.ExpandedName(None, _tag),
            pyxb.binding.datatypes.double,
            scope=CTD_ANON_51,
            documentation=_doc,
            location=pyxb.utils.utility.Location(
                'http://sdformat.org/schemas/joint.xsd', _line, 20)))
del _tag, _doc, _line
def _BuildAutomaton_51():
    """Build the FAC content-model automaton for ``CTD_ANON_51``.

    Six states (lower, upper, effort, velocity, stiffness, dissipation);
    the last four carry 0..1 occurrence counters.  The helper deletes
    itself from the module namespace so it is invoked exactly once.
    """
    global _BuildAutomaton_51
    del _BuildAutomaton_51
    import pyxb.utils.fac as fac

    _SCHEMA = 'http://sdformat.org/schemas/joint.xsd'
    _loc = pyxb.utils.utility.Location

    # Occurrence counters for the four optional (0..1) particles.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 181, 20))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 190, 20))
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 199, 20))
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 208, 20))
    counters = {cc_0, cc_1, cc_2, cc_3}

    def _use(tag, line):
        # ElementUse symbol for a child element of CTD_ANON_51.
        return pyxb.binding.content.ElementUse(
            CTD_ANON_51._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            _loc(_SCHEMA, line, 20))

    st_0 = fac.State(_use('lower', 164), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    st_1 = fac.State(_use('upper', 173), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    st_2 = fac.State(_use('effort', 182), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_0, False)},
                     is_unordered_catenation=False)
    st_3 = fac.State(_use('velocity', 191), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_1, False)},
                     is_unordered_catenation=False)
    st_4 = fac.State(_use('stiffness', 200), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_2, False)},
                     is_unordered_catenation=False)
    st_5 = fac.State(_use('dissipation', 209), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_3, False)},
                     is_unordered_catenation=False)
    states = [st_0, st_1, st_2, st_3, st_4, st_5]

    def _counted(counter, origin):
        # Transitions leaving ``origin``: the particle may repeat (counter
        # incremented) or close (counter finalized) before any successor,
        # in the same order the pyxb generator emits them.
        out = []
        for dst in states:
            if dst is origin:
                out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
            out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        return out

    # 'lower' and 'upper' impose no counter updates on their successors.
    st_0._set_transitionSet([fac.Transition(dst, []) for dst in states])
    st_1._set_transitionSet([fac.Transition(dst, []) for dst in states])
    st_2._set_transitionSet(_counted(cc_0, st_2))
    st_3._set_transitionSet(_counted(cc_1, st_3))
    st_4._set_transitionSet(_counted(cc_2, st_4))
    st_5._set_transitionSet(_counted(cc_3, st_5))
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_51._Automaton = _BuildAutomaton_51()

# Register the child elements of CTD_ANON_52 (per the xyz documentation,
# this appears to describe a joint axis — xyz vector, frame flag, dynamics
# and limit sub-elements).  Tuples are (tag, binding type, documentation,
# schema line); 'limit' carries no documentation in the schema, hence the
# None sentinel.  All locations are at column 14 of joint.xsd.
for _tag, _type, _doc, _line in (
        ('xyz', vector3,
         '\n \n Represents the x,y,z components of the axis unit vector. The axis is\n expressed in the joint frame unless the use_parent_model_frame\n flag is set to true. The vector should be normalized.\n \n ',
         237),
        ('use_parent_model_frame', pyxb.binding.datatypes.boolean,
         '\n \n Flag to interpret the axis xyz element in the parent model frame instead\n of joint frame. Provided for Gazebo compatibility\n (see https://bitbucket.org/osrf/gazebo/issue/494 ).\n \n ',
         250),
        ('dynamics', CTD_ANON_53,
         '\n An element specifying physical properties of the joint. These values are used to specify modeling properties of the joint, particularly useful for simulation.\n ',
         263),
        ('limit', CTD_ANON_54, None, 312),
):
    _kw = {} if _doc is None else {'documentation': _doc}
    CTD_ANON_52._AddElement(
        pyxb.binding.basis.element(
            pyxb.namespace.ExpandedName(None, _tag), _type,
            scope=CTD_ANON_52,
            location=pyxb.utils.utility.Location(
                'http://sdformat.org/schemas/joint.xsd', _line, 14),
            **_kw))
del _tag, _type, _doc, _line, _kw
def _BuildAutomaton_52():
    """Build the FAC content-model automaton for ``CTD_ANON_52``.

    Four states (xyz, use_parent_model_frame, dynamics, limit); the last
    two carry 0..1 occurrence counters.  The helper deletes itself from
    the module namespace so it is invoked exactly once.
    """
    global _BuildAutomaton_52
    del _BuildAutomaton_52
    import pyxb.utils.fac as fac

    _SCHEMA = 'http://sdformat.org/schemas/joint.xsd'
    _loc = pyxb.utils.utility.Location

    # Occurrence counters for the two optional (0..1) particles.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 262, 14))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 311, 14))
    counters = {cc_0, cc_1}

    def _use(tag, line):
        # ElementUse symbol for a child element of CTD_ANON_52.
        return pyxb.binding.content.ElementUse(
            CTD_ANON_52._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            _loc(_SCHEMA, line, 14))

    st_0 = fac.State(_use('xyz', 237), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    st_1 = fac.State(_use('use_parent_model_frame', 250), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    st_2 = fac.State(_use('dynamics', 263), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_0, False)},
                     is_unordered_catenation=False)
    st_3 = fac.State(_use('limit', 312), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_1, False)},
                     is_unordered_catenation=False)
    states = [st_0, st_1, st_2, st_3]

    def _counted(counter, origin):
        # Transitions leaving ``origin``: the particle may repeat (counter
        # incremented) or close (counter finalized) before any successor,
        # in the same order the pyxb generator emits them.
        out = []
        for dst in states:
            if dst is origin:
                out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
            out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        return out

    # 'xyz' and 'use_parent_model_frame' impose no counter updates.
    st_0._set_transitionSet([fac.Transition(dst, []) for dst in states])
    st_1._set_transitionSet([fac.Transition(dst, []) for dst in states])
    st_2._set_transitionSet(_counted(cc_0, st_2))
    st_3._set_transitionSet(_counted(cc_1, st_3))
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_52._Automaton = _BuildAutomaton_52()

# Register the child elements of CTD_ANON_53: four optional double-valued
# joint-dynamics parameters (same tags as CTD_ANON_50, different schema
# locations and a longer damping note).  Tuples are (tag, documentation,
# schema line); all at column 20 of joint.xsd, xsd:double type.
for _tag, _doc, _line in (
        ('damping',
         '\n The physical velocity dependent viscous damping coefficient of the joint. EXPERIMENTAL: if damping coefficient is negative and implicit_spring_damper is true, adaptive damping is used.\n ',
         272),
        ('friction',
         '\n The physical static friction value of the joint.\n ',
         281),
        ('spring_reference',
         '\n The spring reference position for this joint axis.\n ',
         290),
        ('spring_stiffness',
         '\n The spring stiffness for this joint axis.\n ',
         299),
):
    CTD_ANON_53._AddElement(
        pyxb.binding.basis.element(
            pyxb.namespace.ExpandedName(None, _tag),
            pyxb.binding.datatypes.double,
            scope=CTD_ANON_53,
            documentation=_doc,
            location=pyxb.utils.utility.Location(
                'http://sdformat.org/schemas/joint.xsd', _line, 20)))
del _tag, _doc, _line
def _BuildAutomaton_53():
    """Build the FAC content-model automaton for ``CTD_ANON_53``.

    Same shape as ``_BuildAutomaton_50`` (damping, friction,
    spring_reference, spring_stiffness; the first two counted 0..1) but
    with this type's schema locations.  The helper deletes itself from
    the module namespace so it is invoked exactly once.
    """
    global _BuildAutomaton_53
    del _BuildAutomaton_53
    import pyxb.utils.fac as fac

    _SCHEMA = 'http://sdformat.org/schemas/joint.xsd'
    _loc = pyxb.utils.utility.Location

    # Occurrence counters for the two optional (0..1) particles.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 271, 20))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 280, 20))
    counters = {cc_0, cc_1}

    def _use(tag, line):
        # ElementUse symbol for a child element of CTD_ANON_53.
        return pyxb.binding.content.ElementUse(
            CTD_ANON_53._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            _loc(_SCHEMA, line, 20))

    st_0 = fac.State(_use('damping', 272), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_0, False)},
                     is_unordered_catenation=False)
    st_1 = fac.State(_use('friction', 281), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_1, False)},
                     is_unordered_catenation=False)
    st_2 = fac.State(_use('spring_reference', 290), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    st_3 = fac.State(_use('spring_stiffness', 299), is_initial=True,
                     final_update=set(), is_unordered_catenation=False)
    states = [st_0, st_1, st_2, st_3]

    def _counted(counter, origin):
        # Transitions leaving ``origin``: the particle may repeat (counter
        # incremented) or close (counter finalized) before any successor,
        # in the same order the pyxb generator emits them.
        out = []
        for dst in states:
            if dst is origin:
                out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
            out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        return out

    st_0._set_transitionSet(_counted(cc_0, st_0))
    st_1._set_transitionSet(_counted(cc_1, st_1))
    # The remaining states impose no counter updates on their successors.
    st_2._set_transitionSet([fac.Transition(dst, []) for dst in states])
    st_3._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_53._Automaton = _BuildAutomaton_53()

# Register the child elements of CTD_ANON_54: six double-valued joint-limit
# parameters (same tags as CTD_ANON_51, different schema locations and
# slightly different stiffness/dissipation notes).  Tuples are (tag,
# documentation, schema line); all at column 20 of joint.xsd, xsd:double.
for _tag, _doc, _line in (
        ('lower',
         '\n An attribute specifying the lower joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ',
         316),
        ('upper',
         '\n An attribute specifying the upper joint limit (radians for revolute joints, meters for prismatic joints). Omit if joint is continuous.\n ',
         325),
        ('effort',
         '\n An attribute for enforcing the maximum joint effort applied by Joint::SetForce. Limit is not enforced if value is negative.\n ',
         334),
        ('velocity',
         '\n (not implemented) An attribute for enforcing the maximum joint velocity.\n ',
         343),
        ('stiffness',
         '\n Joint stop stiffness. Supported physics engines: SimBody.\n ',
         352),
        ('dissipation',
         '\n Joint stop dissipation. Supported physics engines: SimBody.\n ',
         361),
):
    CTD_ANON_54._AddElement(
        pyxb.binding.basis.element(
            pyxb.namespace.ExpandedName(None, _tag),
            pyxb.binding.datatypes.double,
            scope=CTD_ANON_54,
            documentation=_doc,
            location=pyxb.utils.utility.Location(
                'http://sdformat.org/schemas/joint.xsd', _line, 20)))
del _tag, _doc, _line
def _BuildAutomaton_54():
    """Build the FAC content-model automaton for ``CTD_ANON_54``.

    Six states (lower, upper, effort, velocity, stiffness, dissipation),
    every one of them an optional 0..1 particle with its own occurrence
    counter.  The helper deletes itself from the module namespace so it
    is invoked exactly once.
    """
    global _BuildAutomaton_54
    del _BuildAutomaton_54
    import pyxb.utils.fac as fac

    _SCHEMA = 'http://sdformat.org/schemas/joint.xsd'
    _loc = pyxb.utils.utility.Location

    # One occurrence counter per optional (0..1) particle.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 315, 20))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 324, 20))
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 333, 20))
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 342, 20))
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 351, 20))
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=_loc(_SCHEMA, 360, 20))
    counters = {cc_0, cc_1, cc_2, cc_3, cc_4, cc_5}

    def _use(tag, line):
        # ElementUse symbol for a child element of CTD_ANON_54.
        return pyxb.binding.content.ElementUse(
            CTD_ANON_54._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            _loc(_SCHEMA, line, 20))

    st_0 = fac.State(_use('lower', 316), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_0, False)},
                     is_unordered_catenation=False)
    st_1 = fac.State(_use('upper', 325), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_1, False)},
                     is_unordered_catenation=False)
    st_2 = fac.State(_use('effort', 334), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_2, False)},
                     is_unordered_catenation=False)
    st_3 = fac.State(_use('velocity', 343), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_3, False)},
                     is_unordered_catenation=False)
    st_4 = fac.State(_use('stiffness', 352), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_4, False)},
                     is_unordered_catenation=False)
    st_5 = fac.State(_use('dissipation', 361), is_initial=True,
                     final_update={fac.UpdateInstruction(cc_5, False)},
                     is_unordered_catenation=False)
    states = [st_0, st_1, st_2, st_3, st_4, st_5]

    def _counted(counter, origin):
        # Transitions leaving ``origin``: the particle may repeat (counter
        # incremented) or close (counter finalized) before any successor,
        # in the same order the pyxb generator emits them.
        out = []
        for dst in states:
            if dst is origin:
                out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, True)]))
            out.append(fac.Transition(dst, [fac.UpdateInstruction(counter, False)]))
        return out

    # Each state loops on its own counter before handing over to any other.
    st_0._set_transitionSet(_counted(cc_0, st_0))
    st_1._set_transitionSet(_counted(cc_1, st_1))
    st_2._set_transitionSet(_counted(cc_2, st_2))
    st_3._set_transitionSet(_counted(cc_3, st_3))
    st_4._set_transitionSet(_counted(cc_4, st_4))
    st_5._set_transitionSet(_counted(cc_5, st_5))
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_54._Automaton = _BuildAutomaton_54()

# Register the child elements of CTD_ANON_55: physics-engine specific
# sub-blocks (simbody, ode) plus the provide_feedback flag.  Tuples are
# (tag, binding type, documentation, schema line); all at column 14.
for _tag, _type, _doc, _line in (
        ('simbody', CTD_ANON_56,
         '\n Simbody specific parameters\n ',
         387),
        ('ode', CTD_ANON_57,
         '\n ODE specific parameters\n ',
         409),
        ('provide_feedback', pyxb.binding.datatypes.boolean,
         '\n If provide feedback is set to true, physics engine will compute the constraint forces at this joint. For now, provide_feedback under ode block will override this tag and given user warning about the migration. provide_feedback under ode is scheduled to be removed in SDF 1.5.\n ',
         555),
):
    CTD_ANON_55._AddElement(
        pyxb.binding.basis.element(
            pyxb.namespace.ExpandedName(None, _tag), _type,
            scope=CTD_ANON_55,
            documentation=_doc,
            location=pyxb.utils.utility.Location(
                'http://sdformat.org/schemas/joint.xsd', _line, 14)))
del _tag, _type, _doc, _line
def _BuildAutomaton_55():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_55
    del _BuildAutomaton_55
    import pyxb.utils.fac as fac

    _XSD = 'http://sdformat.org/schemas/joint.xsd'
    # (element tag, schema line of the element use).  The counter condition for
    # each element carries a metadata location exactly one line above its use.
    _SPEC = (
        ('simbody', 387),
        ('ode', 409),
        ('provide_feedback', 555),
    )
    counters = set()
    conds = []
    states = []
    for tag, line in _SPEC:
        # Each child element may occur at most once (0..1).
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(_XSD, line - 1, 14))
        counters.add(cc)
        conds.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_55._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(_XSD, line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Every state can reach every state.  The source state's own symbol gets
    # two transitions (update flag True, then False); every other target gets a
    # single False-flag transition, always against the source state's counter.
    for idx, (src, cc) in enumerate(zip(states, conds)):
        transitions = []
        for jdx, dst in enumerate(states):
            if jdx == idx:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the generated content-model automaton to CTD_ANON_55.
CTD_ANON_55._Automaton = _BuildAutomaton_55()

# Child element of CTD_ANON_56 (the <simbody> parameter block of a joint).
CTD_ANON_56._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'must_be_loop_joint'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_56,
                               documentation='\n Force cut in the multibody graph at this joint.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 396, 20)))
def _BuildAutomaton_56():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_56
    del _BuildAutomaton_56
    import pyxb.utils.fac as fac

    _XSD = 'http://sdformat.org/schemas/joint.xsd'
    # A single optional <must_be_loop_joint> child (0..1 occurrences).
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location(_XSD, 395, 20))
    counters = set([cc_0])
    symbol = pyxb.binding.content.ElementUse(
        CTD_ANON_56._UseForTag(pyxb.namespace.ExpandedName(None, 'must_be_loop_joint')),
        pyxb.utils.utility.Location(_XSD, 396, 20))
    st_0 = fac.State(symbol, is_initial=True,
                     final_update=set([fac.UpdateInstruction(cc_0, False)]),
                     is_unordered_catenation=False)
    states = [st_0]
    # Self-transitions only: first with the update flag set, then cleared.
    st_0._set_transitionSet([
        fac.Transition(st_0, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_0, [fac.UpdateInstruction(cc_0, False)]),
    ])
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the generated content-model automaton to CTD_ANON_56.
CTD_ANON_56._Automaton = _BuildAutomaton_56()

# Child elements of CTD_ANON_57 (the <ode> parameter block of a joint):
# feedback/damping flags, solver tuning scalars, and the nested
# <limit>/<suspension> parameter groups.
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'provide_feedback'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_57,
                               documentation='\n (DEPRECATION WARNING: In SDF 1.5 this tag will be replaced by the same tag directly under the physics-block. For now, this tag overrides the one outside of ode-block, but in SDF 1.5 this tag will be removed completely.) If provide feedback is set to true, ODE will compute the constraint forces at this joint.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 418, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'cfm_damping'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_57,
                               documentation='\n If cfm damping is set to true, ODE will use CFM to simulate damping, allows for infinite damping, and one additional constraint row (previously used for joint limit) is always active.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 427, 20)))
CTD_ANON_57._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'implicit_spring_damper'),
                                                   pyxb.binding.datatypes.boolean, scope=CTD_ANON_57,
                                                   documentation='\n If implicit_spring_damper is set to true, ODE will use CFM, ERP to simulate stiffness and damping, allows for infinite damping, and one additional constraint row (previously used for joint limit) is always active. This replaces cfm_damping parameter in sdf 1.4.\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/joint.xsd', 436, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'fudge_factor'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_57,
                               documentation='\n Scale the excess for in a joint motor at joint limits. Should be between zero and one.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 445, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'cfm'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_57,
                               documentation='\n Constraint force mixing for constrained directions\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 454, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'erp'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_57,
                               documentation='\n Error reduction parameter for constrained directions\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 463, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'bounce'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_57,
                               documentation='\n Bounciness of the limits\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 472, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'max_force'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_57,
                               documentation='\n Maximum force or torque used to reach the desired velocity.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 481, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'velocity'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_57,
                               documentation='\n The desired velocity of the joint. Should only be set if you want the joint to move on load.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 490, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'limit'), CTD_ANON_58, scope=CTD_ANON_57,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 499, 20)))
CTD_ANON_57._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'suspension'), CTD_ANON_59, scope=CTD_ANON_57,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 525, 20)))
def _BuildAutomaton_57():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_57
    del _BuildAutomaton_57
    import pyxb.utils.fac as fac

    _XSD = 'http://sdformat.org/schemas/joint.xsd'
    # (element tag, schema line of the element use).  The counter condition for
    # each element carries a metadata location exactly one line above its use.
    _SPEC = (
        ('provide_feedback', 418),
        ('cfm_damping', 427),
        ('implicit_spring_damper', 436),
        ('fudge_factor', 445),
        ('cfm', 454),
        ('erp', 463),
        ('bounce', 472),
        ('max_force', 481),
        ('velocity', 490),
        ('limit', 499),
        ('suspension', 525),
    )
    counters = set()
    conds = []
    states = []
    for tag, line in _SPEC:
        # Each child element may occur at most once (0..1).
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(_XSD, line - 1, 20))
        counters.add(cc)
        conds.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_57._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(_XSD, line, 20))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Every state can reach every state.  The source state's own symbol gets
    # two transitions (update flag True, then False); every other target gets a
    # single False-flag transition, always against the source state's counter.
    for idx, (src, cc) in enumerate(zip(states, conds)):
        transitions = []
        for jdx, dst in enumerate(states):
            if jdx == idx:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the generated content-model automaton to CTD_ANON_57.
CTD_ANON_57._Automaton = _BuildAutomaton_57()

# Child elements of CTD_ANON_58 (the <limit> group inside a joint's <ode>
# block): joint-stop constraint parameters.
CTD_ANON_58._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'cfm'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_58,
                               documentation='\n Constraint force mixing parameter used by the joint stop\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 503, 26)))
CTD_ANON_58._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'erp'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_58,
                               documentation='\n Error reduction parameter used by the joint stop\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 512, 26)))
def _BuildAutomaton_58():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_58
    del _BuildAutomaton_58
    import pyxb.utils.fac as fac

    _XSD = 'http://sdformat.org/schemas/joint.xsd'
    counters = set()
    states = []
    # <cfm> and <erp> children: no counter conditions and an empty
    # final-update set for each state.
    for tag, line in (('cfm', 503), ('erp', 512)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_58._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(_XSD, line, 26))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    # Fully connected transition sets with empty update-instruction lists.
    for src in states:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])
    # Third positional flag is False here, matching the generated original.
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the generated content-model automaton to CTD_ANON_58.
CTD_ANON_58._Automaton = _BuildAutomaton_58()

# Child elements of CTD_ANON_59 (the <suspension> group inside a joint's
# <ode> block): suspension constraint parameters.
CTD_ANON_59._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'cfm'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_59,
                               documentation='\n Suspension constraint force mixing parameter\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 529, 26)))
CTD_ANON_59._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'erp'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_59,
                               documentation='\n Suspension error reduction parameter\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/joint.xsd', 538, 26)))
def _BuildAutomaton_59():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_59
    del _BuildAutomaton_59
    import pyxb.utils.fac as fac

    _XSD = 'http://sdformat.org/schemas/joint.xsd'
    counters = set()
    states = []
    # <cfm> and <erp> children: no counter conditions and an empty
    # final-update set for each state.
    for tag, line in (('cfm', 529), ('erp', 538)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_59._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(_XSD, line, 26))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    # Fully connected transition sets with empty update-instruction lists.
    for src in states:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])
    # Third positional flag is False here, matching the generated original.
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the generated content-model automaton to CTD_ANON_59.
CTD_ANON_59._Automaton = _BuildAutomaton_59()

# Child elements of CTD_ANON_60 (a <link>): namespaced sub-elements drawn
# from other schema files (audio, collision, inertial, projector, sensor,
# visual) plus unqualified link-local flags and pose from link.xsd.
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audio_sink'), pyxb.binding.datatypes.anyType,
                               scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_sink.xsd', 9,
                                                                    2)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audio_source'), CTD_ANON_5, scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/audio_source.xsd', 9,
                                                                    2)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'collision'), CTD_ANON_15, scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/collision.xsd', 11,
                                                                    2)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'inertial'), CTD_ANON_46, scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/inertial.xsd', 9, 2)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'gravity'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_60,
                               documentation='\n If true, the link is affected by gravity.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 20, 8)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'self_collide'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_60,
                               documentation='\n If true, the link can collide with other links in the model. Two links within a model will collide if link1.self_collide OR link2.self_collide. Links connected by a joint will never collide.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 29, 8)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'kinematic'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_60,
                               documentation='\n If true, the link is kinematic only\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 38, 8)))
CTD_ANON_60._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_60,
                                                   documentation='\n This is the pose of the link reference frame, relative to the model reference frame.\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/link.xsd', 47, 8)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'must_be_base_link'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_60,
                               documentation='\n If true, the link will have 6DOF and be a direct child of world.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 56, 8)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'velocity_decay'), CTD_ANON_61, scope=CTD_ANON_60,
                               documentation="\n Exponential damping of the link's velocity.\n ",
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 65, 8)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'projector'), CTD_ANON_75, scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 5, 2)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sensor'), CTD_ANON_82, scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 23, 2)))
CTD_ANON_60._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'visual'), CTD_ANON_96, scope=CTD_ANON_60,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 12, 2)))
def _BuildAutomaton_60():
    # One-shot builder: drop itself from the module namespace once invoked.
    global _BuildAutomaton_60
    del _BuildAutomaton_60
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/link.xsd'
    # One row per child element of CTD_ANON_60:
    #   (namespace, tag, symbol source line, counter source line or None)
    # The first six elements each carry a CounterCondition(min=0, max=1);
    # the remainder carry no counter.
    rows = [
        (None, 'gravity', 20, 19),
        (None, 'self_collide', 29, 28),
        (None, 'kinematic', 38, 37),
        (None, 'pose', 47, 46),
        (None, 'must_be_base_link', 56, 55),
        (None, 'velocity_decay', 65, 64),
        (Namespace, 'inertial', 95, None),
        (Namespace, 'collision', 96, None),
        (Namespace, 'visual', 97, None),
        (Namespace, 'sensor', 98, None),
        (Namespace, 'projector', 99, None),
        (Namespace, 'audio_sink', 100, None),
        (Namespace, 'audio_source', 101, None),
    ]

    # Create the occurrence counters (in row order), remembering which state
    # owns which counter.
    counters = set()
    ccs = []
    for ns, tag, sym_line, ctr_line in rows:
        if ctr_line is None:
            ccs.append(None)
        else:
            cc = fac.CounterCondition(min=0, max=1,
                                      metadata=pyxb.utils.utility.Location(schema, ctr_line, 8))
            counters.add(cc)
            ccs.append(cc)

    # Create one initial state per element; counted states finalize with an
    # UpdateInstruction(cc, False).
    states = []
    for (ns, tag, sym_line, ctr_line), cc in zip(rows, ccs):
        final_update = set()
        if cc is not None:
            final_update.add(fac.UpdateInstruction(cc, False))
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_60._UseForTag(pyxb.namespace.ExpandedName(ns, tag)),
            pyxb.utils.utility.Location(schema, sym_line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    # Fully-connected transition table.  From a counted state i, every
    # transition carries UpdateInstruction(cc_i, ...); the self-loop appears
    # twice (True first, then False).  Uncounted states transition with no
    # update instructions.
    for i, origin in enumerate(states):
        cc = ccs[i]
        transitions = []
        for j, target in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(target, []))
            else:
                if i == j:
                    transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        origin._set_transitionSet(transitions)

    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_60._Automaton = _BuildAutomaton_60()
# --- Child-element declarations for the <velocity_decay> type (CTD_ANON_61) ---
CTD_ANON_61._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'linear'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_61,
                               documentation='\n Linear damping\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 74, 14)))
CTD_ANON_61._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'angular'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_61,
                               documentation='\n Angular damping\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/link.xsd', 83, 14)))
def _BuildAutomaton_61():
    # Self-removing one-shot builder for CTD_ANON_61's content automaton.
    global _BuildAutomaton_61
    del _BuildAutomaton_61
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/link.xsd'
    # (tag, symbol source line, counter source line); both elements are
    # governed by a CounterCondition(min=0, max=1).
    layout = [('linear', 74, 73), ('angular', 83, 82)]

    counters = set()
    ccs = []
    states = []
    for tag, sym_line, ctr_line in layout:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, ctr_line, 14))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_61._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, sym_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))

    # Fully-connected transitions; the self-loop carries (cc, True) before
    # the ordinary (cc, False) transition.
    for i, origin in enumerate(states):
        cc = ccs[i]
        transitions = []
        for j, target in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        origin._set_transitionSet(transitions)

    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_61._Automaton = _BuildAutomaton_61()
# --- Child-element declarations for the logical_camera type (CTD_ANON_62) ---
CTD_ANON_62._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'near'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_62,
                               documentation='\n Near clipping distance of the view frustum\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/logical_camera.xsd',
                                                                    13, 8)))
CTD_ANON_62._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'far'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_62,
                               documentation='\n Far clipping distance of the view frustum\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/logical_camera.xsd',
                                                                    22, 8)))
CTD_ANON_62._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'aspect_ratio'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_62,
                               documentation='\n Aspect ratio of the near and far planes. This is the width divided by the height of the near or far planes.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/logical_camera.xsd',
                                                                    31, 8)))
CTD_ANON_62._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'horizontal_fov'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_62,
                               documentation="\n Horizontal field of view of the frustum, in radians. This is the angle between the frustum's vertex and the edges of the near or far plane.\n ",
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/logical_camera.xsd',
                                                                    40, 8)))
def _BuildAutomaton_62():
    # Self-removing one-shot builder for CTD_ANON_62's content automaton.
    global _BuildAutomaton_62
    del _BuildAutomaton_62
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/logical_camera.xsd'
    # (tag, symbol source line); this automaton uses no occurrence counters.
    elements = [('near', 13), ('far', 22), ('aspect_ratio', 31), ('horizontal_fov', 40)]

    states = []
    for tag, line in elements:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_62._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))

    # Every state can transition to every state with no update instructions.
    for origin in states:
        origin._set_transitionSet([fac.Transition(target, []) for target in states])

    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_62._Automaton = _BuildAutomaton_62()
# --- Child-element declarations for the magnetometer type (CTD_ANON_63) ---
# One anonymous sub-type per body-frame axis.
CTD_ANON_63._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'x'), CTD_ANON_64, scope=CTD_ANON_63,
                               documentation='\n \n Parameters related to the body-frame X axis of the magnetometer\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 13,
                                                                    8)))
CTD_ANON_63._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'y'), CTD_ANON_65, scope=CTD_ANON_63,
                               documentation='\n \n Parameters related to the body-frame Y axis of the magnetometer\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 28,
                                                                    8)))
CTD_ANON_63._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'z'), CTD_ANON_66, scope=CTD_ANON_63,
                               documentation='\n \n Parameters related to the body-frame Z axis of the magnetometer\n \n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 43,
                                                                    8)))
def _BuildAutomaton_63():
    # Self-removing one-shot builder for CTD_ANON_63's content automaton.
    global _BuildAutomaton_63
    del _BuildAutomaton_63
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/magnetometer.xsd'
    # (tag, symbol source line, counter source line); each axis element is
    # governed by a CounterCondition(min=0, max=1).
    layout = [('x', 13, 12), ('y', 28, 27), ('z', 43, 42)]

    counters = set()
    ccs = []
    states = []
    for tag, sym_line, ctr_line in layout:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, ctr_line, 8))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_63._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, sym_line, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))

    # Fully-connected transitions; the self-loop carries (cc, True) before
    # the ordinary (cc, False) transition.
    for i, origin in enumerate(states):
        cc = ccs[i]
        transitions = []
        for j, target in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        origin._set_transitionSet(transitions)

    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_63._Automaton = _BuildAutomaton_63()
def _BuildAutomaton_64():
    # Self-removing one-shot builder for CTD_ANON_64's content automaton.
    global _BuildAutomaton_64
    del _BuildAutomaton_64
    import pyxb.utils.fac as fac
    # No child elements: the automaton has no states and no counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_64._Automaton = _BuildAutomaton_64()
def _BuildAutomaton_65():
    # One-shot builder: erase itself from the module namespace after use.
    global _BuildAutomaton_65
    del _BuildAutomaton_65
    import pyxb.utils.fac as fac
    # Empty content model: no states, no occurrence counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_65._Automaton = _BuildAutomaton_65()
def _BuildAutomaton_66():
    # One-shot builder: erase itself from the module namespace after use.
    global _BuildAutomaton_66
    del _BuildAutomaton_66
    import pyxb.utils.fac as fac
    # Empty content model: no states, no occurrence counters.
    return fac.Automaton([], set(), False, containing_state=None)
CTD_ANON_66._Automaton = _BuildAutomaton_66()
# --- Child-element declarations for the material type (CTD_ANON_67) ---
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'script'), CTD_ANON_68, scope=CTD_ANON_67,
                               documentation='\n Name of material from an installed script file. This will override the color element if the script exists.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 13, 8)))
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'shader'), CTD_ANON_69, scope=CTD_ANON_67,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 44, 8)))
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'lighting'), pyxb.binding.datatypes.boolean,
                               scope=CTD_ANON_67,
                               documentation='\n If false, dynamic lighting will be disabled\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 68, 8)))
# The four color components below all use the shared `color` simple type.
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'ambient'), color, scope=CTD_ANON_67,
                               documentation='\n The ambient color of a material specified by set of four numbers representing red/green/blue, each in the range of [0,1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 77, 8)))
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'diffuse'), color, scope=CTD_ANON_67,
                               documentation='\n The diffuse color of a material specified by set of four numbers representing red/green/blue/alpha, each in the range of [0,1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 86, 8)))
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'specular'), color, scope=CTD_ANON_67,
                               documentation='\n The specular color of a material specified by set of four numbers representing red/green/blue/alpha, each in the range of [0,1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 95, 8)))
CTD_ANON_67._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'emissive'), color, scope=CTD_ANON_67,
                               documentation='\n The emissive color of a material specified by set of four numbers representing red/green/blue, each in the range of [0,1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 104,
                                                                    8)))
def _BuildAutomaton_67():
    """Build the content-model automaton for CTD_ANON_67 (<material>).

    Every child element is optional with at most one occurrence, so each
    element gets its own 0..1 counter condition and every state may both
    start and end a valid document fragment.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_67
    del _BuildAutomaton_67
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/material.xsd'
    # (tag, counter-condition schema line, element-use schema line); all at
    # column 8 of material.xsd.
    element_specs = [
        ('script', 12, 13),
        ('shader', 43, 44),
        ('lighting', 67, 68),
        ('ambient', 76, 77),
        ('diffuse', 85, 86),
        ('specular', 94, 95),
        ('emissive', 103, 104),
    ]
    counters = set()
    states = []
    state_counters = []
    for tag, cc_line, use_line in element_specs:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(schema, cc_line, 8))
        counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_67._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, use_line, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    # Any element may follow any other; only the self-loop increments the
    # source state's occurrence counter (the extra non-incrementing self
    # transition mirrors the generated form).
    for src_index, src in enumerate(states):
        cc = state_counters[src_index]
        transitions = []
        for dst_index, dst in enumerate(states):
            if dst_index == src_index:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_67._Automaton = _BuildAutomaton_67()
CTD_ANON_68._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'uri'), pyxb.binding.datatypes.string,
scope=CTD_ANON_68,
documentation='\n URI of the material script file\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 22,
14)))
CTD_ANON_68._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'name'), pyxb.binding.datatypes.string,
scope=CTD_ANON_68,
documentation='\n Name of the script within the script file\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 31,
14)))
def _BuildAutomaton_68():
    """Build the content-model automaton for CTD_ANON_68 (<script>).

    Both children are unconditioned (no occurrence counters) and every
    state can transition to every other; the automaton is not nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_68
    del _BuildAutomaton_68
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/material.xsd'
    states = []
    for tag, line in (('uri', 22), ('name', 31)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_68._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 14))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    for state in states:
        state._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_68._Automaton = _BuildAutomaton_68()
CTD_ANON_69._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'normal_map'), pyxb.binding.datatypes.string,
scope=CTD_ANON_69,
documentation='\n filename of the normal map\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 48,
14)))
def _BuildAutomaton_69():
    """Build the content-model automaton for CTD_ANON_69 (<shader>).

    A single optional <normal_map> child (0..1), tracked by one counter.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_69
    del _BuildAutomaton_69
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/material.xsd'
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location(schema, 47, 14))
    symbol = pyxb.binding.content.ElementUse(
        CTD_ANON_69._UseForTag(pyxb.namespace.ExpandedName(None, 'normal_map')),
        pyxb.utils.utility.Location(schema, 48, 14))
    st_0 = fac.State(symbol, is_initial=True,
                     final_update=set([fac.UpdateInstruction(cc_0, False)]),
                     is_unordered_catenation=False)
    st_0._set_transitionSet([
        fac.Transition(st_0, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_0, [fac.UpdateInstruction(cc_0, False)]),
    ])
    return fac.Automaton([st_0], set([cc_0]), True, containing_state=None)
CTD_ANON_69._Automaton = _BuildAutomaton_69()
CTD_ANON_70._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'uri'), pyxb.binding.datatypes.string,
scope=CTD_ANON_70, documentation='\n Mesh uri\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 13,
8)))
CTD_ANON_70._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'submesh'), CTD_ANON_71, scope=CTD_ANON_70,
documentation='\n Use a named submesh. The submesh must exist in the mesh specified by the uri\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 22,
8)))
CTD_ANON_70._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'scale'), vector3, scope=CTD_ANON_70,
documentation='\n Scaling factor applied to the mesh\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 53,
8)))
def _BuildAutomaton_70():
    """Build the content-model automaton for CTD_ANON_70 (<mesh>).

    <uri> is mandatory (no occurrence counter); <submesh> and <scale> are
    each optional (0..1) with their own counter condition.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_70
    del _BuildAutomaton_70
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/mesh_shape.xsd'

    def loc(line):
        return pyxb.utils.utility.Location(schema, line, 8)

    # (tag, element-use schema line, counter schema line or None for the
    # required element).
    specs = [('uri', 13, None), ('submesh', 22, 21), ('scale', 53, 52)]
    counters = set()
    states = []
    state_counters = []
    for tag, use_line, cc_line in specs:
        cc = None
        if cc_line is not None:
            cc = fac.CounterCondition(min=0, max=1, metadata=loc(cc_line))
            counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_70._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(use_line))
        final = set() if cc is None else set([fac.UpdateInstruction(cc, False)])
        states.append(fac.State(symbol, is_initial=True, final_update=final,
                                is_unordered_catenation=False))
    for i, src in enumerate(states):
        cc = state_counters[i]
        transitions = []
        for j, dst in enumerate(states):
            if cc is None:
                # Uncounted source state: transitions carry no updates.
                transitions.append(fac.Transition(dst, []))
            else:
                if j == i:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_70._Automaton = _BuildAutomaton_70()
CTD_ANON_71._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'name'), pyxb.binding.datatypes.string,
scope=CTD_ANON_71,
documentation='\n Name of the submesh within the parent mesh\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 31,
14)))
CTD_ANON_71._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'center'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON_71,
documentation='\n Set to true to center the vertices of the submesh at 0,0,0. This will effectively remove any transformations on the submesh before the poses from parent links and models are applied.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/mesh_shape.xsd', 40,
14)))
def _BuildAutomaton_71():
    """Build the content-model automaton for CTD_ANON_71 (<submesh>).

    <name> is mandatory (no counter); <center> is optional (0..1).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_71
    del _BuildAutomaton_71
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/mesh_shape.xsd'
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location(schema, 39, 14))
    st_name = fac.State(
        pyxb.binding.content.ElementUse(
            CTD_ANON_71._UseForTag(pyxb.namespace.ExpandedName(None, 'name')),
            pyxb.utils.utility.Location(schema, 31, 14)),
        is_initial=True, final_update=set(), is_unordered_catenation=False)
    st_center = fac.State(
        pyxb.binding.content.ElementUse(
            CTD_ANON_71._UseForTag(pyxb.namespace.ExpandedName(None, 'center')),
            pyxb.utils.utility.Location(schema, 40, 14)),
        is_initial=True, final_update=set([fac.UpdateInstruction(cc_0, False)]),
        is_unordered_catenation=False)
    # 'name' carries no counter, so its outgoing transitions are bare; the
    # 'center' self-loop increments the 0..1 counter.
    st_name._set_transitionSet([fac.Transition(st_name, []),
                                fac.Transition(st_center, [])])
    st_center._set_transitionSet([
        fac.Transition(st_name, [fac.UpdateInstruction(cc_0, False)]),
        fac.Transition(st_center, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_center, [fac.UpdateInstruction(cc_0, False)]),
    ])
    return fac.Automaton([st_name, st_center], set([cc_0]), True, containing_state=None)
CTD_ANON_71._Automaton = _BuildAutomaton_71()
CTD_ANON_72._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'normal'), vector3, scope=CTD_ANON_72,
documentation='\n Normal direction for the plane\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plane_shape.xsd', 13,
8)))
CTD_ANON_72._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'size'), vector2d, scope=CTD_ANON_72,
documentation='\n Length of each side of the plane\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plane_shape.xsd', 22,
8)))
def _BuildAutomaton_72():
    """Build the content-model automaton for CTD_ANON_72 (<plane>).

    <normal> and <size> are unconditioned (no occurrence counters) and the
    automaton is not nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_72
    del _BuildAutomaton_72
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/plane_shape.xsd'
    states = []
    for tag, line in (('normal', 13), ('size', 22)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_72._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    for state in states:
        state._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
# Attach the finished content-model automaton to CTD_ANON_72.
CTD_ANON_72._Automaton = _BuildAutomaton_72()
def _BuildAutomaton_73():
    """Build the content-model automaton for CTD_ANON_73 (<plugin>).

    A single lax wildcard — any element from any namespace — repeated
    without bound (counter 0..unbounded).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_73
    del _BuildAutomaton_73
    import pyxb.utils.fac as fac

    cc_0 = fac.CounterCondition(
        min=0, max=None,
        metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 12, 8))
    symbol = pyxb.binding.content.WildcardUse(
        pyxb.binding.content.Wildcard(process_contents=pyxb.binding.content.Wildcard.PC_lax,
                                      namespace_constraint=pyxb.binding.content.Wildcard.NC_any),
        pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 12, 8))
    st_0 = fac.State(symbol, is_initial=True,
                     final_update=set([fac.UpdateInstruction(cc_0, False)]),
                     is_unordered_catenation=False)
    # Unbounded repetition: the only transition is the counting self-loop.
    st_0._set_transitionSet([fac.Transition(st_0, [fac.UpdateInstruction(cc_0, True)])])
    return fac.Automaton([st_0], set([cc_0]), True, containing_state=None)
CTD_ANON_73._Automaton = _BuildAutomaton_73()
CTD_ANON_74._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'point'), vector2d, scope=CTD_ANON_74,
documentation='\n \n A series of points that define the path of the polyline.\n \n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/polyline_shape.xsd',
13, 8)))
CTD_ANON_74._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'height'), pyxb.binding.datatypes.double,
scope=CTD_ANON_74, documentation='\n Height of the polyline\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/polyline_shape.xsd',
24, 8)))
def _BuildAutomaton_74():
    """Build the content-model automaton for CTD_ANON_74 (<polyline>).

    <point> and <height> are unconditioned (no occurrence counters) and
    the automaton is not nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_74
    del _BuildAutomaton_74
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/polyline_shape.xsd'
    states = []
    for tag, line in (('point', 13), ('height', 24)):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_74._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(schema, line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))
    for state in states:
        state._set_transitionSet([fac.Transition(dst, []) for dst in states])
    return fac.Automaton(states, set(), False, containing_state=None)
CTD_ANON_74._Automaton = _BuildAutomaton_74()
CTD_ANON_75._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'plugin'), CTD_ANON_73, scope=CTD_ANON_75,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 9, 2)))
CTD_ANON_75._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'texture'), pyxb.binding.datatypes.string,
scope=CTD_ANON_75, documentation='\n Texture name\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 9, 8)))
CTD_ANON_75._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_75,
documentation='\n Pose of the projector\n ',
location=pyxb.utils.utility.Location(
'http://sdformat.org/schemas/projector.xsd', 18, 8)))
CTD_ANON_75._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'fov'), pyxb.binding.datatypes.double,
scope=CTD_ANON_75, documentation='\n Field of view\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 27,
8)))
CTD_ANON_75._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'near_clip'), pyxb.binding.datatypes.double,
scope=CTD_ANON_75, documentation='\n Near clip distance\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 36,
8)))
CTD_ANON_75._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'far_clip'), pyxb.binding.datatypes.double,
scope=CTD_ANON_75, documentation='\n far clip distance\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/projector.xsd', 45,
8)))
def _BuildAutomaton_75():
    """Build the content-model automaton for CTD_ANON_75 (<projector>).

    <texture> and <plugin> carry no occurrence counter; the four middle
    elements (<pose>, <fov>, <near_clip>, <far_clip>) are optional (0..1),
    each with its own counter condition.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_75
    del _BuildAutomaton_75
    import pyxb.utils.fac as fac

    schema = 'http://sdformat.org/schemas/projector.xsd'

    def loc(line):
        return pyxb.utils.utility.Location(schema, line, 8)

    # (tag, element namespace, element-use schema line, counter schema line
    # or None for the uncounted elements).
    specs = [
        ('texture', None, 9, None),
        ('pose', None, 18, 17),
        ('fov', None, 27, 26),
        ('near_clip', None, 36, 35),
        ('far_clip', None, 45, 44),
        ('plugin', Namespace, 53, None),
    ]
    counters = set()
    states = []
    state_counters = []
    for tag, ns, use_line, cc_line in specs:
        cc = None
        if cc_line is not None:
            cc = fac.CounterCondition(min=0, max=1, metadata=loc(cc_line))
            counters.add(cc)
        state_counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_75._UseForTag(pyxb.namespace.ExpandedName(ns, tag)), loc(use_line))
        final = set() if cc is None else set([fac.UpdateInstruction(cc, False)])
        states.append(fac.State(symbol, is_initial=True, final_update=final,
                                is_unordered_catenation=False))
    for i, src in enumerate(states):
        cc = state_counters[i]
        transitions = []
        for j, dst in enumerate(states):
            if cc is None:
                # Uncounted source state: transitions carry no updates.
                transitions.append(fac.Transition(dst, []))
            else:
                if j == i:
                    transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_75._Automaton = _BuildAutomaton_75()
CTD_ANON_76._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'scan'), CTD_ANON_77, scope=CTD_ANON_76,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 13, 8)))
CTD_ANON_76._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'range'), CTD_ANON_80, scope=CTD_ANON_76,
documentation='\n specifies range properties of each simulated ray\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 99, 8)))
CTD_ANON_76._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'noise'), CTD_ANON_81, scope=CTD_ANON_76,
documentation='\n The properties of the noise model that should be applied to generated scans\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 139, 8)))
def _BuildAutomaton_76():
    """Build the content-model automaton for CTD_ANON_76 (<ray>).

    Three element states (scan, range, noise); <noise> is bounded to at
    most one occurrence by a counter condition.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_76
    del _BuildAutomaton_76
    import pyxb.utils.fac as fac

    # Shorthand for locations inside this schema document.
    loc = lambda row, col: pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', row, col)
    # Counter limiting the optional <noise> element to [0, 1] occurrences.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=loc(138, 8))
    counters = {cc_0}
    # (tag, schema row, schema col, bounding counter or None)
    spec = [
        ('scan', 13, 8, None),
        ('range', 99, 8, None),
        ('noise', 139, 8, cc_0),
    ]
    states = []
    for tag, row, col, cc in spec:
        final_update = set() if cc is None else {fac.UpdateInstruction(cc, False)}
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_76._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(row, col))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False))
    # Fully connected transition graph.  A counted state's outgoing edges
    # all reset its counter (False); the increment (True) self-edge is
    # inserted immediately before the reset self-edge, matching the
    # generator's ordering.
    for idx, state in enumerate(states):
        cc = spec[idx][3]
        transitions = []
        for jdx, target in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(target, []))
            else:
                if jdx == idx:
                    transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_76._Automaton = _BuildAutomaton_76()
# Child element declarations for the <scan> complex type (CTD_ANON_77):
# <horizontal> and <vertical> sweep descriptors.
CTD_ANON_77._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'horizontal'), CTD_ANON_78, scope=CTD_ANON_77,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 17, 14)))
CTD_ANON_77._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'vertical'), CTD_ANON_79, scope=CTD_ANON_77,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 56, 14)))
def _BuildAutomaton_77():
    """Build the content-model automaton for CTD_ANON_77 (<scan>).

    Two element states; <vertical> is bounded to at most one occurrence
    by a counter condition.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_77
    del _BuildAutomaton_77
    import pyxb.utils.fac as fac

    # Shorthand for locations inside this schema document.
    loc = lambda row, col: pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', row, col)
    # Counter limiting the optional <vertical> element to [0, 1] occurrences.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=loc(55, 14))
    counters = {cc_0}
    # (tag, schema row, schema col, bounding counter or None)
    spec = [
        ('horizontal', 17, 14, None),
        ('vertical', 56, 14, cc_0),
    ]
    states = []
    for tag, row, col, cc in spec:
        final_update = set() if cc is None else {fac.UpdateInstruction(cc, False)}
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_77._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(row, col))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False))
    # Fully connected transition graph; counted state takes an extra
    # increment (True) self-edge ahead of its reset (False) self-edge.
    for idx, state in enumerate(states):
        cc = spec[idx][3]
        transitions = []
        for jdx, target in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(target, []))
            else:
                if jdx == idx:
                    transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_77._Automaton = _BuildAutomaton_77()
CTD_ANON_78._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'samples'), pyxb.binding.datatypes.unsignedInt,
scope=CTD_ANON_78,
documentation='\n The number of simulated rays to generate per complete laser sweep cycle.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 21, 20)))
CTD_ANON_78._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'resolution'), pyxb.binding.datatypes.double,
scope=CTD_ANON_78,
documentation='\n This number is multiplied by samples to determine the number of range data points returned. If resolution is less than one, range data is interpolated. If resolution is greater than one, range data is averaged.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 30, 20)))
CTD_ANON_78._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min_angle'), pyxb.binding.datatypes.double,
scope=CTD_ANON_78,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 39, 20)))
CTD_ANON_78._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'max_angle'), pyxb.binding.datatypes.double,
scope=CTD_ANON_78,
documentation='\n Must be greater or equal to min_angle\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 43, 20)))
def _BuildAutomaton_78():
    """Build the content-model automaton for CTD_ANON_78 (horizontal scan).

    Four element states, no occurrence counters, and every state reaches
    every other; the automaton is not nullable (third Automaton argument
    is False, matching the generated original).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_78
    del _BuildAutomaton_78
    import pyxb.utils.fac as fac

    # Shorthand for locations inside this schema document.
    loc = lambda row, col: pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', row, col)
    counters = set()
    # (tag, schema row, schema col)
    spec = [
        ('samples', 21, 20),
        ('resolution', 30, 20),
        ('min_angle', 39, 20),
        ('max_angle', 43, 20),
    ]
    states = []
    for tag, row, col in spec:
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_78._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(row, col))
        states.append(fac.State(symbol, is_initial=True, final_update=set(), is_unordered_catenation=False))
    # Fully connected transition graph with no counter updates.
    for state in states:
        state._set_transitionSet([fac.Transition(target, []) for target in states])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_78._Automaton = _BuildAutomaton_78()
CTD_ANON_79._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'samples'), pyxb.binding.datatypes.unsignedInt,
scope=CTD_ANON_79,
documentation='\n The number of simulated rays to generate per complete laser sweep cycle.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 60, 20)))
CTD_ANON_79._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'resolution'), pyxb.binding.datatypes.double,
scope=CTD_ANON_79,
documentation='\n This number is multiplied by samples to determine the number of range data points returned. If resolution is less than one, range data is interpolated. If resolution is greater than one, range data is averaged.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 69, 20)))
CTD_ANON_79._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min_angle'), pyxb.binding.datatypes.double,
scope=CTD_ANON_79,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 78, 20)))
CTD_ANON_79._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'max_angle'), pyxb.binding.datatypes.double,
scope=CTD_ANON_79,
documentation='\n Must be greater or equal to min_angle\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 82, 20)))
def _BuildAutomaton_79():
    """Build the content-model automaton for CTD_ANON_79 (vertical scan).

    Four element states; <resolution> is bounded to at most one
    occurrence by a counter condition.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_79
    del _BuildAutomaton_79
    import pyxb.utils.fac as fac

    # Shorthand for locations inside this schema document.
    loc = lambda row, col: pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', row, col)
    # Counter limiting the optional <resolution> element to [0, 1] occurrences.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=loc(68, 20))
    counters = {cc_0}
    # (tag, schema row, schema col, bounding counter or None)
    spec = [
        ('samples', 60, 20, None),
        ('resolution', 69, 20, cc_0),
        ('min_angle', 78, 20, None),
        ('max_angle', 82, 20, None),
    ]
    states = []
    for tag, row, col, cc in spec:
        final_update = set() if cc is None else {fac.UpdateInstruction(cc, False)}
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_79._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(row, col))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False))
    # Fully connected transition graph; counted state takes an extra
    # increment (True) self-edge ahead of its reset (False) self-edge.
    for idx, state in enumerate(states):
        cc = spec[idx][3]
        transitions = []
        for jdx, target in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(target, []))
            else:
                if jdx == idx:
                    transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_79._Automaton = _BuildAutomaton_79()
CTD_ANON_80._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min'), pyxb.binding.datatypes.double,
scope=CTD_ANON_80,
documentation='\n The minimum distance for each ray.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 108, 14)))
CTD_ANON_80._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'max'), pyxb.binding.datatypes.double,
scope=CTD_ANON_80,
documentation='\n The maximum distance for each ray.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 117, 14)))
CTD_ANON_80._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'resolution'), pyxb.binding.datatypes.double,
scope=CTD_ANON_80,
documentation='\n Linear resolution of each ray.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 126, 14)))
def _BuildAutomaton_80():
    """Build the content-model automaton for CTD_ANON_80 (<range>).

    Three element states; <resolution> is bounded to at most one
    occurrence by a counter condition.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_80
    del _BuildAutomaton_80
    import pyxb.utils.fac as fac

    # Shorthand for locations inside this schema document.
    loc = lambda row, col: pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', row, col)
    # Counter limiting the optional <resolution> element to [0, 1] occurrences.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=loc(125, 14))
    counters = {cc_0}
    # (tag, schema row, schema col, bounding counter or None)
    spec = [
        ('min', 108, 14, None),
        ('max', 117, 14, None),
        ('resolution', 126, 14, cc_0),
    ]
    states = []
    for tag, row, col, cc in spec:
        final_update = set() if cc is None else {fac.UpdateInstruction(cc, False)}
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_80._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(row, col))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False))
    # Fully connected transition graph; counted state takes an extra
    # increment (True) self-edge ahead of its reset (False) self-edge.
    for idx, state in enumerate(states):
        cc = spec[idx][3]
        transitions = []
        for jdx, target in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(target, []))
            else:
                if jdx == idx:
                    transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_80._Automaton = _BuildAutomaton_80()
CTD_ANON_81._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'type'), pyxb.binding.datatypes.string,
scope=CTD_ANON_81,
documentation='\n The type of noise. Currently supported types are: "gaussian" (draw noise values independently for each beam from a Gaussian distribution).\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 148, 14)))
CTD_ANON_81._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'mean'), pyxb.binding.datatypes.double,
scope=CTD_ANON_81,
documentation='\n For type "gaussian," the mean of the Gaussian distribution from which noise values are drawn.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 157, 14)))
CTD_ANON_81._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'stddev'), pyxb.binding.datatypes.double,
scope=CTD_ANON_81,
documentation='\n For type "gaussian," the standard deviation of the Gaussian distribution from which noise values are drawn.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 166, 14)))
def _BuildAutomaton_81():
    """Build the content-model automaton for CTD_ANON_81 (<noise>).

    Three element states; <mean> and <stddev> are each bounded to at
    most one occurrence by their own counter conditions.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_81
    del _BuildAutomaton_81
    import pyxb.utils.fac as fac

    # Shorthand for locations inside this schema document.
    loc = lambda row, col: pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', row, col)
    # Counters limiting the optional <mean> and <stddev> elements to [0, 1].
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=loc(156, 14))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=loc(165, 14))
    counters = {cc_0, cc_1}
    # (tag, schema row, schema col, bounding counter or None)
    spec = [
        ('type', 148, 14, None),
        ('mean', 157, 14, cc_0),
        ('stddev', 166, 14, cc_1),
    ]
    states = []
    for tag, row, col, cc in spec:
        final_update = set() if cc is None else {fac.UpdateInstruction(cc, False)}
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_81._UseForTag(pyxb.namespace.ExpandedName(None, tag)), loc(row, col))
        states.append(fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False))
    # Fully connected transition graph; each counted state updates only
    # its own counter, with the increment (True) self-edge inserted
    # ahead of the reset (False) self-edge.
    for idx, state in enumerate(states):
        cc = spec[idx][3]
        transitions = []
        for jdx, target in enumerate(states):
            if cc is None:
                transitions.append(fac.Transition(target, []))
            else:
                if jdx == idx:
                    transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, True)]))
                transitions.append(fac.Transition(target, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_81._Automaton = _BuildAutomaton_81()
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'altimeter'), CTD_ANON_2, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/altimeter.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'camera'), CTD_ANON_8, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/camera.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'contact'), CTD_ANON_16, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/contact.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'force_torque'), CTD_ANON_18, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/forcetorque.xsd', 9,
2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gps'), CTD_ANON_21, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/gps.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'imu'), CTD_ANON_34, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/imu.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'logical_camera'), CTD_ANON_62, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/logical_camera.xsd', 9,
2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'magnetometer'), CTD_ANON_63, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/magnetometer.xsd', 9,
2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'plugin'), CTD_ANON_73, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ray'), CTD_ANON_76, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/ray.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'rfidtag'), pyxb.binding.datatypes.anyType,
scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/rfid.xsd', 4, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'rfid'), pyxb.binding.datatypes.anyType,
scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/rfidtag.xsd', 4, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'always_on'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON_82,
documentation='\n If true the sensor will always be updated according to the update rate.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 27, 8)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'update_rate'), pyxb.binding.datatypes.double,
scope=CTD_ANON_82,
documentation='\n The frequency at which the sensor data is generated. If left unspecified, the sensor will generate data every cycle.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 36, 8)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'visualize'), pyxb.binding.datatypes.boolean,
scope=CTD_ANON_82,
documentation='\n If true, the sensor is visualized in the GUI\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 45, 8)))
CTD_ANON_82._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_82,
documentation='\n This is the pose of the sensor, relative to the parent (link or joint) reference frame.\n ',
location=pyxb.utils.utility.Location(
'http://sdformat.org/schemas/sensor.xsd', 54, 8)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'topic'), pyxb.binding.datatypes.string,
scope=CTD_ANON_82,
documentation='\n Name of the topic on which data is published. This is necessary for visualization\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sensor.xsd', 63, 8)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sonar'), CTD_ANON_83, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd', 9, 2)))
CTD_ANON_82._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transceiver'), CTD_ANON_95, scope=CTD_ANON_82,
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 9,
2)))
def _BuildAutomaton_82():
    """Build the content-model automaton for CTD_ANON_82 (<sensor>).

    The first five child elements (always_on .. topic) each carry a 0..1
    occurrence counter; every state can transition to every other state,
    and re-entering a counted state first attempts to consume its counter.
    Returns a nullable ``fac.Automaton`` (empty content is accepted).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_82
    del _BuildAutomaton_82
    import pyxb.utils.fac as fac

    _SENSOR_XSD = 'http://sdformat.org/schemas/sensor.xsd'

    # One optional (min=0, max=1) occurrence counter per leading scalar
    # element; kept in declaration order so index i pairs with state i.
    counters = set()
    occurrence_counters = []
    for xsd_line in (26, 35, 44, 53, 62):
        cc = fac.CounterCondition(
            min=0, max=1,
            metadata=pyxb.utils.utility.Location(_SENSOR_XSD, xsd_line, 8))
        counters.add(cc)
        occurrence_counters.append(cc)

    # (namespace, element tag, schema line) for every child element,
    # in the order the generated code declares the automaton states.
    element_specs = [
        (None, 'always_on', 27),
        (None, 'update_rate', 36),
        (None, 'visualize', 45),
        (None, 'pose', 54),
        (None, 'topic', 63),
        (Namespace, 'plugin', 71),
        (Namespace, 'altimeter', 72),
        (Namespace, 'camera', 73),
        (Namespace, 'contact', 74),
        (Namespace, 'gps', 75),
        (Namespace, 'imu', 76),
        (Namespace, 'logical_camera', 77),
        (Namespace, 'magnetometer', 78),
        (Namespace, 'ray', 79),
        (Namespace, 'rfidtag', 80),
        (Namespace, 'rfid', 81),
        (Namespace, 'sonar', 82),
        (Namespace, 'transceiver', 83),
        (Namespace, 'force_torque', 84),
    ]

    n_counted = len(occurrence_counters)

    # Build one state per element.  Counted states finalize by releasing
    # (False) their counter; the remaining states finalize unconditionally.
    states = []
    for idx, (ns, tag, xsd_line) in enumerate(element_specs):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_82._UseForTag(pyxb.namespace.ExpandedName(ns, tag)),
            pyxb.utils.utility.Location(_SENSOR_XSD, xsd_line, 8))
        final_update = set()
        if idx < n_counted:
            final_update.add(fac.UpdateInstruction(occurrence_counters[idx], False))
        states.append(fac.State(symbol, is_initial=True,
                                final_update=final_update,
                                is_unordered_catenation=False))

    # Fully-connected transition sets.  From a counted state every move
    # releases its counter; the self-transition additionally offers a
    # counter-consuming (True) alternative first, mirroring the generated
    # per-state transition order.  Uncounted states transition freely.
    for src_idx, src in enumerate(states):
        transitions = []
        if src_idx < n_counted:
            cc = occurrence_counters[src_idx]
            for dst_idx, dst in enumerate(states):
                if dst_idx == src_idx:
                    transitions.append(
                        fac.Transition(dst, [fac.UpdateInstruction(cc, True)]))
                transitions.append(
                    fac.Transition(dst, [fac.UpdateInstruction(cc, False)]))
        else:
            transitions = [fac.Transition(dst, []) for dst in states]
        src._set_transitionSet(transitions)

    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the freshly built content-model automaton to the <sensor> type.
CTD_ANON_82._Automaton = _BuildAutomaton_82()

# Child-element declarations for CTD_ANON_83 (the <sonar> content model):
# min/max range and beam radius, all xsd:double.
CTD_ANON_83._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_83, documentation='\n Minimum range\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd', 13, 8)))
CTD_ANON_83._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'max'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_83, documentation='\n Max range\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd', 22, 8)))
CTD_ANON_83._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'radius'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_83,
        documentation='\n Radius of the sonar cone at max range.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sonar.xsd', 31, 8)))
def _BuildAutomaton_83():
    """Build the content-model automaton for CTD_ANON_83 (<sonar>).

    Three states (min, max, radius), no occurrence counters, every state
    reachable from every other with no counter updates.  The automaton is
    non-nullable (third ``fac.Automaton`` argument is False).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_83
    del _BuildAutomaton_83
    import pyxb.utils.fac as fac

    counters = set()
    _SONAR_XSD = 'http://sdformat.org/schemas/sonar.xsd'
    element_specs = [('min', 13), ('max', 22), ('radius', 31)]

    states = []
    for tag, xsd_line in element_specs:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_83._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(_SONAR_XSD, xsd_line, 8))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))

    # Fully-connected transitions with empty update-instruction lists.
    for src in states:
        src._set_transitionSet([fac.Transition(dst, []) for dst in states])

    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the freshly built content-model automaton to the <sonar> type.
CTD_ANON_83._Automaton = _BuildAutomaton_83()

# Child-element declaration for CTD_ANON_84 (the <sphere> shape):
# a single xsd:double <radius>.
CTD_ANON_84._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'radius'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_84, documentation='\n radius of the sphere\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/sphere_shape.xsd', 13,
            8)))
def _BuildAutomaton_84():
    # Content model for CTD_ANON_84 (sphere shape): exactly one required
    # 'radius' element.  The helper deletes itself from the module namespace
    # so the automaton is built at most once.
    global _BuildAutomaton_84
    del _BuildAutomaton_84
    import pyxb.utils.fac as fac

    loc = pyxb.utils.utility.Location('http://sdformat.org/schemas/sphere_shape.xsd', 13, 8)
    symbol = pyxb.binding.content.ElementUse(
        CTD_ANON_84._UseForTag(pyxb.namespace.ExpandedName(None, 'radius')), loc)
    state = fac.State(symbol, is_initial=True, final_update=set(),
                      is_unordered_catenation=False)
    # The lone state loops back to itself; no counters are involved.
    state._set_transitionSet([fac.Transition(state, [])])
    return fac.Automaton([state], set(), False, containing_state=None)
# Bind the compiled content-model automaton to the sphere-shape type.
CTD_ANON_84._Automaton = _BuildAutomaton_84()

# surface.xsd: register the four children of the anonymous surface type.
CTD_ANON_85._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'bounce'), CTD_ANON_86, scope=CTD_ANON_85,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 13, 8)))
CTD_ANON_85._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'friction'), CTD_ANON_87, scope=CTD_ANON_85,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 39, 8)))
CTD_ANON_85._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'contact'), CTD_ANON_90, scope=CTD_ANON_85,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 149, 8)))
CTD_ANON_85._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'soft_contact'), CTD_ANON_93, scope=CTD_ANON_85,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 318, 8)))
def _BuildAutomaton_85():
    # Content model for CTD_ANON_85 (surface): four optional (0..1) children
    # in schema order -- bounce, friction, contact, soft_contact.  The helper
    # deletes itself from the module namespace so it runs only once.
    global _BuildAutomaton_85
    del _BuildAutomaton_85
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the element use, schema line of the particle)
    spec = [('bounce', 13, 12),
            ('friction', 39, 38),
            ('contact', 149, 148),
            ('soft_contact', 318, 317)]
    counters = []
    states = []
    for tag, el_line, cc_line in spec:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(xsd, cc_line, 8))
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_85._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 8))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # From state i: either loop on itself (incrementing cc_i) or move to any
    # other state; counter cc_i is closed on every outgoing transition.
    for i, src in enumerate(states):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Bind the compiled content-model automaton to the surface type.
CTD_ANON_85._Automaton = _BuildAutomaton_85()

# surface.xsd: children of the 'bounce' type (both xs:double).
CTD_ANON_86._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'restitution_coefficient'),
                                                   pyxb.binding.datatypes.double, scope=CTD_ANON_86,
                                                   documentation='\n Bounciness coefficient of restitution, from [0...1], where 0=no bounciness.\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/surface.xsd', 17, 14)))
CTD_ANON_86._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'threshold'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_86,
                               documentation='\n Bounce capture velocity, below which effective coefficient of restitution is 0.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 26, 14)))
def _BuildAutomaton_86():
    # Content model for CTD_ANON_86 (bounce): two optional (0..1) children in
    # schema order -- restitution_coefficient, threshold.  Self-deleting so
    # the automaton is built at most once.
    global _BuildAutomaton_86
    del _BuildAutomaton_86
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the element use, schema line of the particle)
    spec = [('restitution_coefficient', 17, 16),
            ('threshold', 26, 25)]
    counters = []
    states = []
    for tag, el_line, cc_line in spec:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(xsd, cc_line, 14))
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_86._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Each state may repeat itself (counter increment) or hand off to any
    # state while closing its own counter.
    for i, src in enumerate(states):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Bind the compiled content-model automaton to the 'bounce' type.
CTD_ANON_86._Automaton = _BuildAutomaton_86()

# surface.xsd: per-physics-engine parameter blocks under 'friction'.
CTD_ANON_87._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'ode'), CTD_ANON_88, scope=CTD_ANON_87,
                               documentation='\n ODE friction parameters\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 43, 14)))
CTD_ANON_87._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'bullet'), CTD_ANON_89, scope=CTD_ANON_87,
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 101,
                                                                    14)))
def _BuildAutomaton_87():
    # Content model for CTD_ANON_87 (friction): two optional (0..1) children
    # in schema order -- ode, bullet.  Self-deleting so the automaton is
    # built at most once.
    global _BuildAutomaton_87
    del _BuildAutomaton_87
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the element use, schema line of the particle)
    spec = [('ode', 43, 42),
            ('bullet', 101, 100)]
    counters = []
    states = []
    for tag, el_line, cc_line in spec:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(xsd, cc_line, 14))
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_87._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Each state may repeat itself (counter increment) or hand off to any
    # state while closing its own counter.
    for i, src in enumerate(states):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Bind the compiled content-model automaton to the friction wrapper type.
CTD_ANON_87._Automaton = _BuildAutomaton_87()

# surface.xsd: ODE friction parameters (CTD_ANON_88).
CTD_ANON_88._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'mu'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_88,
                               documentation='\n Coefficient of friction in the range of [0..1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 52, 20)))
CTD_ANON_88._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'mu2'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_88,
                               documentation='\n Second coefficient of friction in the range of [0..1]\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 61, 20)))
CTD_ANON_88._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'fdir1'), vector3, scope=CTD_ANON_88,
                               documentation='\n 3-tuple specifying direction of mu1 in the collision local reference frame.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 70, 20)))
CTD_ANON_88._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'slip1'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_88,
                               documentation='\n Force dependent slip direction 1 in collision local frame, between the range of [0..1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 79, 20)))
CTD_ANON_88._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'slip2'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_88,
                               documentation='\n Force dependent slip direction 2 in collision local frame, between the range of [0..1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 88, 20)))
def _BuildAutomaton_88():
    # Content model for CTD_ANON_88 (ODE friction): five optional (0..1)
    # children in schema order -- mu, mu2, fdir1, slip1, slip2.  The helper
    # deletes itself from the module namespace so it runs only once.
    global _BuildAutomaton_88
    del _BuildAutomaton_88
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the element use, schema line of the particle)
    spec = [('mu', 52, 51),
            ('mu2', 61, 60),
            ('fdir1', 70, 69),
            ('slip1', 79, 78),
            ('slip2', 88, 87)]
    counters = []
    states = []
    for tag, el_line, cc_line in spec:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(xsd, cc_line, 20))
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_88._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 20))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Each state may repeat itself (counter increment) or hand off to any
    # state while closing its own counter.
    for i, src in enumerate(states):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Bind the compiled content-model automaton to the ODE friction type.
CTD_ANON_88._Automaton = _BuildAutomaton_88()

# surface.xsd: Bullet friction parameters (CTD_ANON_89).
CTD_ANON_89._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'friction'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_89,
                               documentation='\n Coefficient of friction in the range of [0..1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 105,
                                                                    20)))
CTD_ANON_89._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'friction2'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_89,
                               documentation='\n Coefficient of friction in the range of [0..1].\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 114,
                                                                    20)))
CTD_ANON_89._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'fdir1'), vector3, scope=CTD_ANON_89,
                               documentation='\n 3-tuple specifying direction of mu1 in the collision local reference frame.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 123,
                                                                    20)))
CTD_ANON_89._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'rolling_friction'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_89,
                               documentation='\n coefficient of friction in the range of [0..1]\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 132,
                                                                    20)))
def _BuildAutomaton_89():
    # Content model for CTD_ANON_89 (Bullet friction): four optional (0..1)
    # children in schema order -- friction, friction2, fdir1,
    # rolling_friction.  Self-deleting so it is built at most once.
    global _BuildAutomaton_89
    del _BuildAutomaton_89
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the element use, schema line of the particle)
    spec = [('friction', 105, 104),
            ('friction2', 114, 113),
            ('fdir1', 123, 122),
            ('rolling_friction', 132, 131)]
    counters = []
    states = []
    for tag, el_line, cc_line in spec:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(xsd, cc_line, 20))
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_89._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 20))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Each state may repeat itself (counter increment) or hand off to any
    # state while closing its own counter.
    for i, src in enumerate(states):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Bind the compiled content-model automaton to the Bullet friction type.
CTD_ANON_89._Automaton = _BuildAutomaton_89()

# surface.xsd: contact parameters (CTD_ANON_90) -- collision-filter flags
# plus per-engine parameter blocks.
CTD_ANON_90._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'collide_without_contact'),
                                                   pyxb.binding.datatypes.boolean, scope=CTD_ANON_90,
                                                   documentation='\n Flag to disable contact force generation, while still allowing collision checks and contact visualization to occur.\n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/surface.xsd', 153, 14)))
CTD_ANON_90._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'collide_without_contact_bitmask'),
                                                   pyxb.binding.datatypes.unsignedInt, scope=CTD_ANON_90,
                                                   documentation='\n Bitmask for collision filtering when collide_without_contact is on \n ',
                                                   location=pyxb.utils.utility.Location(
                                                       'http://sdformat.org/schemas/surface.xsd', 162, 14)))
CTD_ANON_90._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'collide_bitmask'), pyxb.binding.datatypes.unsignedInt,
                               scope=CTD_ANON_90,
                               documentation='\n Bitmask for collision filtering. This will override collide_without_contact\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 171,
                                                                    14)))
CTD_ANON_90._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'ode'), CTD_ANON_91, scope=CTD_ANON_90,
                               documentation='\n ODE contact parameters\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 180,
                                                                    14)))
CTD_ANON_90._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'bullet'), CTD_ANON_92, scope=CTD_ANON_90,
                               documentation='\n Bullet contact parameters\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 247,
                                                                    14)))
def _BuildAutomaton_90():
    # Content model for CTD_ANON_90 (contact): five optional (0..1) children
    # in schema order -- collide_without_contact,
    # collide_without_contact_bitmask, collide_bitmask, ode, bullet.  The
    # helper deletes itself from the module namespace so it runs only once.
    global _BuildAutomaton_90
    del _BuildAutomaton_90
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the element use, schema line of the particle)
    spec = [('collide_without_contact', 153, 152),
            ('collide_without_contact_bitmask', 162, 161),
            ('collide_bitmask', 171, 170),
            ('ode', 180, 179),
            ('bullet', 247, 246)]
    counters = []
    states = []
    for tag, el_line, cc_line in spec:
        cc = fac.CounterCondition(min=0, max=1,
                                  metadata=pyxb.utils.utility.Location(xsd, cc_line, 14))
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_90._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, el_line, 14))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(cc, False)},
                                is_unordered_catenation=False))
    # Each state may repeat itself (counter increment) or hand off to any
    # state while closing its own counter.
    for i, src in enumerate(states):
        transitions = []
        for j, dst in enumerate(states):
            if i == j:
                transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], True)]))
            transitions.append(fac.Transition(dst, [fac.UpdateInstruction(counters[i], False)]))
        src._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Bind the compiled content-model automaton to the contact type.
CTD_ANON_90._Automaton = _BuildAutomaton_90()

# surface.xsd: ODE contact parameters (CTD_ANON_91), all xs:double.
CTD_ANON_91._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'soft_cfm'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_91,
                               documentation='\n Soft constraint force mixing.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 189,
                                                                    20)))
CTD_ANON_91._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'soft_erp'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_91,
                               documentation='\n Soft error reduction parameter\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 198,
                                                                    20)))
CTD_ANON_91._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'kp'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_91,
                               documentation='\n dynamically "stiffness"-equivalent coefficient for contact joints\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 207,
                                                                    20)))
CTD_ANON_91._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'kd'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_91,
                               documentation='\n dynamically "damping"-equivalent coefficient for contact joints\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 216,
                                                                    20)))
CTD_ANON_91._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'max_vel'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_91,
                               documentation='\n maximum contact correction velocity truncation term.\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 225,
                                                                    20)))
CTD_ANON_91._AddElement(
    pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'min_depth'), pyxb.binding.datatypes.double,
                               scope=CTD_ANON_91,
                               documentation='\n minimum allowable depth before contact correction impulse is applied\n ',
                               location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 234,
                                                                    20)))
def _BuildAutomaton_91():
    """Build the FAC content-model automaton for CTD_ANON_91.

    Six optional (0..1) child elements, each guarded by its own counter.
    Any element may follow any other, so every state has a transition to
    every state; repeating a state's own element increments its counter.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_91
    del _BuildAutomaton_91
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, schema line of the occurrence counter); the element use is
    # located one line below its counter in surface.xsd.
    specs = [('soft_cfm', 188), ('soft_erp', 197), ('kp', 206),
             ('kd', 215), ('max_vel', 224), ('min_depth', 233)]

    counters = set()
    conditions = []
    states = []
    for tag, line in specs:
        condition = fac.CounterCondition(
            min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, line, 20))
        counters.add(condition)
        conditions.append(condition)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_91._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line + 1, 20))
        states.append(fac.State(symbol, is_initial=True,
                                final_update={fac.UpdateInstruction(condition, False)},
                                is_unordered_catenation=False))

    for index, (state, condition) in enumerate(zip(states, conditions)):
        transitions = []
        for target_index, target in enumerate(states):
            if target_index == index:
                # Re-entering the same state counts another occurrence.
                transitions.append(
                    fac.Transition(target, [fac.UpdateInstruction(condition, True)]))
            transitions.append(
                fac.Transition(target, [fac.UpdateInstruction(condition, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the content-model automaton built above on CTD_ANON_91.
CTD_ANON_91._Automaton = _BuildAutomaton_91()

# Child-element declarations for CTD_ANON_92 (contact-surface parameters
# from surface.xsd, Bullet-flavoured: split_impulse fields).
CTD_ANON_92._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'soft_cfm'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_92,
        documentation='\n Soft constraint force mixing.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 256,
                                             20)))
CTD_ANON_92._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'soft_erp'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_92,
        documentation='\n Soft error reduction parameter\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 265,
                                             20)))
CTD_ANON_92._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'kp'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_92,
        documentation='\n dynamically "stiffness"-equivalent coefficient for contact joints\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 274,
                                             20)))
CTD_ANON_92._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'kd'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_92,
        documentation='\n dynamically "damping"-equivalent coefficient for contact joints\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 283,
                                             20)))
CTD_ANON_92._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'split_impulse'), pyxb.binding.datatypes.boolean,
        scope=CTD_ANON_92,
        documentation="\n Similar to ODE's max_vel implementation. See http://bulletphysics.org/mediawiki-1.5.8/index.php/BtContactSolverInfo#Split_Impulse for more information.\n ",
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 292,
                                             20)))
CTD_ANON_92._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'split_impulse_penetration_threshold'),
        pyxb.binding.datatypes.double, scope=CTD_ANON_92,
        documentation="\n Similar to ODE's max_vel implementation. See http://bulletphysics.org/mediawiki-1.5.8/index.php/BtContactSolverInfo#Split_Impulse for more information.\n ",
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 301,
                                             20)))
def _BuildAutomaton_92():
    """Build the FAC content-model automaton for CTD_ANON_92.

    Four counted optional elements (soft_cfm, soft_erp, kp, kd) plus two
    uncounted ones (split_impulse, split_impulse_penetration_threshold).
    All states are initial and every state can reach every state; counted
    states reset their counter on exit, uncounted states carry no updates.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_92
    del _BuildAutomaton_92
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    # (tag, element-use line, counter line or None when uncounted).
    specs = [('soft_cfm', 256, 255), ('soft_erp', 265, 264),
             ('kp', 274, 273), ('kd', 283, 282),
             ('split_impulse', 292, None),
             ('split_impulse_penetration_threshold', 301, None)]

    counters = set()
    conditions = []
    states = []
    for tag, symbol_line, counter_line in specs:
        condition = None
        if counter_line is not None:
            condition = fac.CounterCondition(
                min=0, max=1,
                metadata=pyxb.utils.utility.Location(xsd, counter_line, 20))
            counters.add(condition)
        conditions.append(condition)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_92._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, symbol_line, 20))
        final_update = set() if condition is None else {fac.UpdateInstruction(condition, False)}
        states.append(fac.State(symbol, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    for index, (state, condition) in enumerate(zip(states, conditions)):
        transitions = []
        for target_index, target in enumerate(states):
            if condition is None:
                transitions.append(fac.Transition(target, []))
                continue
            if target_index == index:
                # Re-entering the same state counts another occurrence.
                transitions.append(
                    fac.Transition(target, [fac.UpdateInstruction(condition, True)]))
            transitions.append(
                fac.Transition(target, [fac.UpdateInstruction(condition, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the content-model automaton built above on CTD_ANON_92.
CTD_ANON_92._Automaton = _BuildAutomaton_92()

# CTD_ANON_93 has a single <dart> child (DART soft-contact parameters,
# typed CTD_ANON_94).  The documentation string (including its spelling)
# is carried verbatim from surface.xsd.
CTD_ANON_93._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'dart'), CTD_ANON_94, scope=CTD_ANON_93,
        documentation='\n soft contact pamameters based on paper:\n http://www.cc.gatech.edu/graphics/projects/Sumit/homepage/papers/sigasia11/jain_softcontacts_siga11.pdf\n \n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 322,
                                             14)))
def _BuildAutomaton_93():
    """Build the FAC content-model automaton for CTD_ANON_93.

    A single optional (0..1) <dart> element: one state, one occurrence
    counter, self-loop transitions only.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_93
    del _BuildAutomaton_93
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    occurrence = fac.CounterCondition(
        min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, 321, 14))
    symbol = pyxb.binding.content.ElementUse(
        CTD_ANON_93._UseForTag(pyxb.namespace.ExpandedName(None, 'dart')),
        pyxb.utils.utility.Location(xsd, 322, 14))
    dart_state = fac.State(symbol, is_initial=True,
                           final_update={fac.UpdateInstruction(occurrence, False)},
                           is_unordered_catenation=False)
    # Self-loops: repeat increments the counter, fresh entry resets it.
    dart_state._set_transitionSet([
        fac.Transition(dart_state, [fac.UpdateInstruction(occurrence, True)]),
        fac.Transition(dart_state, [fac.UpdateInstruction(occurrence, False)]),
    ])
    return fac.Automaton([dart_state], {occurrence}, True, containing_state=None)
# Install the content-model automaton built above on CTD_ANON_93.
CTD_ANON_93._Automaton = _BuildAutomaton_93()

# Child-element declarations for CTD_ANON_94 (DART soft-contact
# parameters from surface.xsd); each child is an xs:double.
CTD_ANON_94._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'bone_attachment'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_94,
        documentation='\n This is variable k_v in the soft contacts paper. Its unit is N/m.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 333,
                                             20)))
CTD_ANON_94._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'stiffness'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_94,
        documentation='\n This is variable k_e in the soft contacts paper. Its unit is N/m.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 342,
                                             20)))
CTD_ANON_94._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'damping'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_94,
        documentation='\n Viscous damping of point velocity in body frame. Its unit is N/m/s.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 351,
                                             20)))
CTD_ANON_94._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'flesh_mass_fraction'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_94,
        documentation='\n Fraction of mass to be distributed among deformable nodes.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/surface.xsd', 360,
                                             20)))
def _BuildAutomaton_94():
    """Build the FAC content-model automaton for CTD_ANON_94.

    Four child elements with no occurrence counters: every state is
    initial, every state transitions to every state with no counter
    updates, and the automaton is not nullable (third argument False).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_94
    del _BuildAutomaton_94
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/surface.xsd'
    specs = [('bone_attachment', 333), ('stiffness', 342),
             ('damping', 351), ('flesh_mass_fraction', 360)]

    states = []
    for tag, line in specs:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_94._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 20))
        states.append(fac.State(symbol, is_initial=True, final_update=set(),
                                is_unordered_catenation=False))

    # Full transition matrix with empty update-instruction lists.
    for state in states:
        state._set_transitionSet([fac.Transition(target, []) for target in states])
    return fac.Automaton(states, set(), False, containing_state=None)
# Install the content-model automaton built above on CTD_ANON_94.
CTD_ANON_94._Automaton = _BuildAutomaton_94()

# Child-element declarations for CTD_ANON_95 (wireless transceiver sensor
# parameters from transceiver.xsd).  The spelling in the documentation
# strings is carried verbatim from the schema.
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'essid'), pyxb.binding.datatypes.string,
        scope=CTD_ANON_95,
        documentation='\n Service set identifier (network name)\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 13,
                                             8)))
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'frequency'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_95,
        documentation='\n Specifies the frequency of transmission in MHz\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 22,
                                             8)))
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'min_frequency'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_95,
        documentation='\n Only a frequency range is filtered. Here we set the lower bound (MHz).\n \n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 31,
                                             8)))
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'max_frequency'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_95,
        documentation='\n Only a frequency range is filtered. Here we set the upper bound (MHz).\n \n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 41,
                                             8)))
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'gain'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_95,
        documentation='\n Specifies the antenna gain in dBi\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 51,
                                             8)))
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'power'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_95,
        documentation='\n Specifies the transmission power in dBm\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 60,
                                             8)))
CTD_ANON_95._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'sensitivity'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_95,
        documentation='\n Mininum received signal power in dBm\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/transceiver.xsd', 69,
                                             8)))
def _BuildAutomaton_95():
    """Build the FAC content-model automaton for CTD_ANON_95 (transceiver).

    Five counted optional elements (essid, frequency, min_frequency,
    max_frequency, sensitivity) and two uncounted ones (gain, power).
    All states are initial and fully interconnected; counted states reset
    their counter on exit, uncounted states carry no updates.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_95
    del _BuildAutomaton_95
    import pyxb.utils.fac as fac

    xsd = 'http://sdformat.org/schemas/transceiver.xsd'
    # (tag, element-use line, counter line or None when uncounted).
    specs = [('essid', 13, 12), ('frequency', 22, 21),
             ('min_frequency', 31, 30), ('max_frequency', 41, 40),
             ('gain', 51, None), ('power', 60, None),
             ('sensitivity', 69, 68)]

    counters = set()
    conditions = []
    states = []
    for tag, symbol_line, counter_line in specs:
        condition = None
        if counter_line is not None:
            condition = fac.CounterCondition(
                min=0, max=1,
                metadata=pyxb.utils.utility.Location(xsd, counter_line, 8))
            counters.add(condition)
        conditions.append(condition)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_95._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, symbol_line, 8))
        final_update = set() if condition is None else {fac.UpdateInstruction(condition, False)}
        states.append(fac.State(symbol, is_initial=True, final_update=final_update,
                                is_unordered_catenation=False))

    for index, (state, condition) in enumerate(zip(states, conditions)):
        transitions = []
        for target_index, target in enumerate(states):
            if condition is None:
                transitions.append(fac.Transition(target, []))
                continue
            if target_index == index:
                # Re-entering the same state counts another occurrence.
                transitions.append(
                    fac.Transition(target, [fac.UpdateInstruction(condition, True)]))
            transitions.append(
                fac.Transition(target, [fac.UpdateInstruction(condition, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the content-model automaton built above on CTD_ANON_95.
CTD_ANON_95._Automaton = _BuildAutomaton_95()

# Child-element declarations for CTD_ANON_96 (the <visual> element).
# geometry/material/plugin are namespace-qualified and typed by other
# anonymous complex types; the remaining children come from visual.xsd.
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(Namespace, 'geometry'), CTD_ANON_19, scope=CTD_ANON_96,
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/geometry.xsd', 17, 2)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(Namespace, 'material'), CTD_ANON_67, scope=CTD_ANON_96,
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/material.xsd', 9, 2)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(Namespace, 'plugin'), CTD_ANON_73, scope=CTD_ANON_96,
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/plugin.xsd', 9, 2)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'cast_shadows'), pyxb.binding.datatypes.boolean,
        scope=CTD_ANON_96,
        documentation='\n If true the visual will cast shadows.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 16, 8)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'laser_retro'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_96,
        documentation='\n will be implemented in the future release.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 25, 8)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'transparency'), pyxb.binding.datatypes.double,
        scope=CTD_ANON_96,
        documentation='\n The amount of transparency( 0=opaque, 1 = fully transparent)\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 34, 8)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'pose'), pose, scope=CTD_ANON_96,
        documentation='\n The reference frame of the visual element, relative to the reference frame of the link.\n ',
        location=pyxb.utils.utility.Location(
            'http://sdformat.org/schemas/visual.xsd', 43, 8)))
CTD_ANON_96._AddElement(
    pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(None, 'meta'), CTD_ANON_97, scope=CTD_ANON_96,
        documentation='\n Optional meta information for the visual. The information contained within this element should be used to provide additional feedback to an end user.\n ',
        location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 52, 8)))
def _BuildAutomaton_96():
    """Build the PyXB content-model automaton for CTD_ANON_96 (SDF <visual>).

    Auto-generated: each counter-bound child element (cast_shadows,
    laser_retro, transparency, pose, meta) is limited to 0..1 occurrences;
    the material/geometry/plugin states carry no counter updates and may
    therefore repeat freely.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_96
    del _BuildAutomaton_96
    import pyxb.utils.fac as fac
    # Occurrence counters: one 0..1 counter per optional child element.
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 15, 8))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 24, 8))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 33, 8))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 42, 8))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 51, 8))
    counters.add(cc_4)
    # One automaton state per distinct child element tag.
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(None, 'cast_shadows')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 16,
                                                                         8))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(None, 'laser_retro')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 25,
                                                                         8))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(None, 'transparency')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 34,
                                                                         8))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(None, 'pose')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 43,
                                                                         8))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(None, 'meta')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 52,
                                                                         8))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # material/geometry/plugin: no counters, so unbounded occurrences.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'material')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 73,
                                                                         8))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'geometry')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 74,
                                                                         8))
    st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_96._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'plugin')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 75,
                                                                         8))
    st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    # Transition table: a True update increments the state's own counter
    # (repeat), a False update finalizes it before moving to another state.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True)]))
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False)]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False)]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True)]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False)]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False)]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True)]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False)]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False)]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True)]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False)]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False)]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True)]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, False)]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False)]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        ]))
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    transitions.append(fac.Transition(st_6, [
        ]))
    transitions.append(fac.Transition(st_7, [
        ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        ]))
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    transitions.append(fac.Transition(st_6, [
        ]))
    transitions.append(fac.Transition(st_7, [
        ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        ]))
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    transitions.append(fac.Transition(st_6, [
        ]))
    transitions.append(fac.Transition(st_7, [
        ]))
    st_7._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_96._Automaton = _BuildAutomaton_96()
CTD_ANON_97._AddElement(
pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, 'layer'), pyxb.binding.datatypes.int,
scope=CTD_ANON_97,
documentation='\n The layer in which this visual is displayed. The layer number is useful for programs, such as Gazebo, that put visuals in different layers for enhanced visualization.\n ',
location=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 61, 14)))
def _BuildAutomaton_97():
    """Build the PyXB content-model automaton for CTD_ANON_97 (<meta>).

    Auto-generated: a single optional (0..1) <layer> child element.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_97
    del _BuildAutomaton_97
    import pyxb.utils.fac as fac
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1,
                                metadata=pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 60, 14))
    counters.add(cc_0)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_97._UseForTag(pyxb.namespace.ExpandedName(None, 'layer')),
                                             pyxb.utils.utility.Location('http://sdformat.org/schemas/visual.xsd', 61,
                                                                         14))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True)]))
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, False)]))
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_97._Automaton = _BuildAutomaton_97()
|
HBPNeurorobotics/BlenderRobotDesigner
|
robot_designer_plugin/export/urdf/generic/sdf_model_dom.py
|
Python
|
gpl-2.0
| 893,051
|
[
"Gaussian"
] |
926ad06c0efa7eb7ffb2a57573983b24c3375f326eb125d8da6bae6a4648d145
|
import logging
import sys
import socket
import codecs
import json
import hashlib
import shutil
import urlparse
import urllib
import os
import webbrowser
import subprocess
import requests
import requesocks
def idle_port():
    """Return a TCP port number that is currently unused on this host."""
    # Binding to port 0 makes the OS pick a free ephemeral port; we grab
    # the chosen number and release the socket so the caller can reuse it.
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.bind(("", 0))
    probe.listen(1)
    free_port = probe.getsockname()[1]
    probe.close()
    return free_port
def set_ca_certs_env(filepath):
    """Record *filepath* as the CA bundle location in the process environment."""
    os.environ['CA_BUNDLE'] = filepath
def get_ca_certs_env():
    """Return the configured CA bundle path, or the empty string when unset."""
    return os.environ.get('CA_BUNDLE', "")
class LoggerWriter:
    """File-like adapter that forwards write() calls to a logger.

    Intended as a stand-in for sys.stdout/sys.stderr so that print output
    is captured by the logging framework.
    """

    def __init__(self, logger, level):
        self.logger = logger
        self.level = level

    def write(self, message):
        # Skip the bare newline that ``print`` emits after each statement.
        if message != '\n':
            self.logger.log(self.level, message)

    def flush(self):
        # Nothing is buffered; present only to satisfy the file protocol.
        pass
def init_logging():
    """Configure root logging: file-based in --debug mode, console otherwise.

    With ``--debug`` as the first command-line argument, records go to
    ``firefly.log`` and stdout/stderr are redirected into the logger;
    otherwise records go to the console.
    """
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s')
    debug_mode = len(sys.argv) > 1 and sys.argv[1] == "--debug"
    if debug_mode:
        handler = logging.FileHandler("firefly.log")
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root.addHandler(handler)
    if debug_mode:
        # Capture stray print output into the log file as well.
        sys.stdout = LoggerWriter(root, logging.DEBUG)
        sys.stderr = LoggerWriter(root, logging.DEBUG)
def load_file(filename, idna=True):
    """Read a UTF-8 rule file, skipping blank lines and '#' comments.

    When *idna* is true, every space-separated token of each line is
    IDNA-encoded; lines that fail IDNA encoding are silently dropped.
    """
    with codecs.open(filename, "r", "utf-8") as source:
        lines = [line.strip() for line in source.readlines()]
    lines = [line for line in lines if line and not line.startswith('#')]
    if not idna:
        return lines
    encoded = []
    for line in lines:
        try:
            tokens = line.split(" ")
            encoded.append(" ".join([token.encode('idna') for token in tokens]))
        except UnicodeError:
            # Hostnames that cannot be IDNA-encoded are ignored.
            pass
    return encoded
def parse_url(url):
    """Split *url* into (scheme, quoted host, port or None, quoted path+query).

    NOTE: Python 2 code — relies on the ``urlparse`` module and
    ``urllib.quote``, both renamed in Python 3.
    """
    parts = urlparse.urlsplit(url)
    host = parts.netloc
    port = None
    if ":" in parts.netloc:
        pieces = parts.netloc.split(":")
        host = pieces[0]
        port = int(pieces[1])
    # Rebuild just the path + query portion, without scheme or netloc.
    path = urlparse.urlunsplit(("", "", parts.path, parts.query, ""))
    return str(parts.scheme), str(urllib.quote(host)), port, str(urllib.quote(path))
def remote_fetch_with_proxy(url, proxy_info):
    # Fetch *url* through the supplied proxy configuration and return the
    # response body UTF-8 encoded.  Retries up to two extra times before
    # re-raising the last error.
    # NOTE: Python 2 code ("except Exception, e", print statement).
    # requesocks is selected when the proxy configuration mentions a SOCKS
    # proxy, since plain requests lacked SOCKS support at the time.
    if 'socks' in json.dumps(proxy_info):
        s = requesocks.Session()
    else:
        s = requests.Session()
    # trust_env=False: ignore proxy/CA settings from the environment and
    # use only the explicit proxy mapping supplied by the caller.
    s.trust_env = False
    s.proxies = proxy_info
    retry = 2
    while True:
        try:
            r = s.get(url, verify=get_ca_certs_env())
            if r.status_code != requests.codes.ok:  # @UndefinedVariable
                raise Exception("invalid response")
            return r.text.encode("utf-8")
        except Exception, e:
            if retry > 0:
                print str(e), "give it another chance ..."
                retry -= 1
            else:
                raise
def remote_update_datafile(proxy, meta, metafile, metaurl, datafile, dataurl):
    """Refresh a local data file from its remote copy when the metadata changed.

    Downloads the remote metadata; when its 'date' differs from the cached
    *meta* and the downloaded payload matches the advertised SHA-1 digest,
    both the metadata file and the data file are rewritten.  Returns True
    when an update was written, False otherwise.
    """
    meta_text = remote_fetch_with_proxy(metaurl, proxy)
    remote_meta = json.loads(meta_text)
    if remote_meta['date'] == meta['date']:
        return False
    data_text = remote_fetch_with_proxy(dataurl, proxy)
    digest = hashlib.sha1()
    digest.update(data_text)
    if digest.hexdigest() != remote_meta['sha1']:
        # Payload does not match the advertised checksum: keep local copy.
        return False
    with codecs.open(metafile, "w", "utf-8") as meta_out:
        meta_out.write(meta_text.decode("utf-8"))
    with codecs.open(datafile, "w", "utf-8") as data_out:
        data_out.write(data_text.decode("utf-8"))
    return True
def local_update_datafile(data, datafile):
    """Replace *datafile* with *data*, writing via a temporary sibling file.

    Writing to ``<datafile>.tmp`` first and then moving it into place keeps
    readers from ever seeing a half-written file.
    """
    tmp_path = datafile + ".tmp"
    with codecs.open(tmp_path, "w", "utf-8") as tmp_file:
        tmp_file.write(data)
    shutil.move(tmp_path, datafile)
def singleton_check(rootdir):
    """Ensure only one instance runs by taking an exclusive lock file.

    Returns a truthy lock handle (a raw fd on Windows, a locked file object
    elsewhere) that must later be passed to singleton_clean(), or False when
    the lock could not be acquired (typically: another instance is running).
    """
    f = None
    lock = os.path.join(rootdir, os.name + "lock")
    if os.name == 'nt':
        try:
            # O_EXCL creation fails while another process holds the file.
            if os.path.exists(lock):
                os.unlink(lock)
            f = os.open(lock, os.O_CREAT | os.O_EXCL | os.O_RDWR)
        except EnvironmentError as e:
            # errno 13 (EACCES) is the expected "already running" case on
            # Windows; anything else is unexpected and reported.
            if e.errno != 13:
                print(str(e))
            return False
    else:
        try:
            import fcntl
            f = open(lock, 'w')
            fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except EnvironmentError as e:
            # BUG FIX: the original tested "if not f is not None" (i.e.
            # "f is None") and otherwise fell through to "return f", so a
            # *failed* lockf() still returned the truthy file object and a
            # second instance was never rejected.  A locking failure must
            # always report failure; also close the leaked file handle.
            if f is not None:
                print(str(e))
                f.close()
            return False
    return f
def singleton_clean(rootdir, f):
    """Release the singleton lock previously acquired by singleton_check().

    *f* is the handle returned by singleton_check(): a raw fd on Windows,
    a locked file object elsewhere.  Failures are reported, not raised.
    """
    lock = os.path.join(rootdir, os.name + "lock")
    try:
        if os.name == 'nt':
            # Windows handle is a raw file descriptor.
            os.close(f)
            os.unlink(lock)
        else:
            import fcntl
            fcntl.lockf(f, fcntl.LOCK_UN)
            f.close()  # ???
            os.unlink(lock)
    except Exception as e:
        # Best effort: failing to clean up should not abort shutdown.
        print(str(e))
def which(program):
    """Locate *program* like the shell `which`: return its path or None."""

    def _is_executable(candidate):
        # Must be a regular file with the execute bit set for this user.
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _ = os.path.split(program)
    if directory:
        # An explicit path was given; accept it only if it is executable.
        if _is_executable(program):
            return program
        return None
    for search_dir in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(search_dir.strip('"'), program)
        if _is_executable(candidate):
            return candidate
    return None
def open_url(url):
    """Open *url* in the user's browser."""
    if sys.platform == 'darwin':
        # On macOS the original shells out to the system `open` command
        # instead of using the webbrowser module.
        subprocess.Popen(['open', url])
    else:
        webbrowser.open(url)
|
Jonavin/firefly-proxy
|
lib/utils.py
|
Python
|
bsd-2-clause
| 5,423
|
[
"Firefly"
] |
10a51bea8b70ec5c9ff7cfbeec4ac20f4e76bbe172fd1c4e357bc3b873bac823
|
from playground.molecule.molecule import Molecule, Atom
from playground.molecule.molecularsystem import MolecularSystem
import pele.utils.elements as elem
import networkx as nx
import numpy as np
import unittest
import os
class TestMolecule(unittest.TestCase):
    """
    a base class for molecular system unit tests
    """
    def setUp(self):
        """ Function creates a reference molecular system with known data"""
        self.generate_ref_system('basic');
    def generate_ref_system(self, ref_system_type):
        """Build the reference system selected by *ref_system_type*."""
        # generate a reference system that depends on the keyword selected
        if ref_system_type == 'basic':
            self.gen_basic_ref_system()
        else:
            # default to the basic reference system
            self.gen_basic_ref_system()
    def gen_basic_ref_system(self):
        """Create a 4-atom (N, S, CA, O) reference molecule and its graph."""
        # Generate comparison coords list (flat x,y,z triples per atom)
        self.coords = [4.954, -0.924, -5.684,
                       5.427, 0.193, -4.880,
                       5.873, -0.251, -3.503,
                       5.756, -1.417, -3.054]
        self.atom_names = ['N', 'S', 'CA', 'O']
        self.atom_symbols = [elem.alternate_names[name] for name in self.atom_names]
        self.residue = 'GLY'
        self.resid = 1
        self.chain = 'A'
        # Generate the Atom objects - zero based index ordered as in file.
        N = Atom(0, 'N')
        S = Atom(1, 'S')
        CA = Atom(2, 'CA')
        O = Atom(3, 'O')
        # Create the graph
        self.mol_graph = nx.Graph()
        # add the nodes along with the Atom object as a node attribute,
        # used to test equivalence of nodes between molecules.
        self.mol_graph.add_node(N, atom=N)
        self.mol_graph.add_node(S, atom=S)
        self.mol_graph.add_node(CA, atom=CA)
        self.mol_graph.add_node(O, atom=O)
        # add the edges. don't care so much about the order here.
        # edge weight is the geometric mean of the bonded atoms' masses.
        self.mol_graph.add_edge(N, S, hweight = np.sqrt(N.mass * S.mass))
        self.mol_graph.add_edge(S, CA, hweight = np.sqrt(S.mass * CA.mass))
        self.mol_graph.add_edge(CA, O, hweight = np.sqrt(CA.mass * O.mass))
        # create a molecule system from the test data
        reference_molecule = Molecule(0, self.coords, self.mol_graph)
        # create a molecular system from the data
        self.reference_molecular_system = MolecularSystem()
        self.reference_molecular_system.add_molecule(reference_molecule)
    def test_parse_xyz(self):
        """Round-trip the reference system through an .xyz file."""
        # Generate test data - dumps the reference system to an xyz file.
        xyz_data = []
        num_atoms = len(self.atom_names)
        xyz_data.append("        " + str(num_atoms) + "\n")
        xyz_data.append(" Energy Comment\n")
        coord_index = 0
        for atom_symbol in self.atom_symbols:
            l = '{:1} {: >8.3f}{: >8.3f}{: >8.3f}\n'.format(atom_symbol,
                                                            self.coords[3 * coord_index],
                                                            self.coords[3 * coord_index + 1],
                                                            self.coords[3 * coord_index + 2])
            xyz_data.append(l)
            coord_index += 1
        # Save test data as a file
        try:
            with open('test.xyz', "w") as f_out:
                f_out.writelines(xyz_data)
        except IOError:
            raise Exception("Unable to write xyz file for output")
        # calls the testing function
        self.check_Molecule_System('test.xyz', 'pymol', args='-qc')
        # delete the test file
        os.remove('test.xyz')
    def test_molecule_pdb(self):
        """Round-trip the reference system through a minimal PDB file."""
        # Generate test data - dumps the reference data to a dummy PDB
        pdb_data = []
        coord_index = 0
        for atom_name, atom_symbol in zip(self.atom_names, self.atom_symbols):
            l = 'ATOM {: >06d} {: <4} {:3} {:1}{: >4d}     {: >8.3f}{: >8.3f}{: >8.3f}  1.00  5.04          {: >2}\n'.format(
                coord_index + 1,
                atom_name,
                self.residue,
                self.chain,
                int(self.resid),
                self.coords[3 * coord_index],
                self.coords[3 * coord_index + 1],
                self.coords[3 * coord_index + 2],
                atom_symbol)
            pdb_data.append(l)
            coord_index += 1
        # Save test data as a file
        try:
            with open('test.pdb', "w") as f_out:
                f_out.writelines(pdb_data)
        except IOError:
            raise Exception("Unable to write pdb file for output")
        # calls the checking function to assert that the test molecular system
        # and reference molecular system are the same.
        self.check_Molecule_System('test.pdb', parser='pymol', args='-qc')
        # delete the test file
        os.remove('test.pdb')
    def check_Molecule_System(self, l_filename, parser='pymol', args='-qc'):
        """Load *l_filename* and assert it equals the reference system."""
        # Create the molecular system
        self.test_molecular_system = MolecularSystem()
        # use the molecular system to load in the filename and create the molecule.
        self.test_molecular_system.load_file(l_filename, parser, args)
        # Check that the molecular system loaded into the test_molecular system
        # is equal to the molecular system in the reference molecular system
        self.assertEqual(self.test_molecular_system, self.reference_molecular_system)
    def test_atom(self):
        """Check element lookup (name, symbol, color, mass, radius) for Atom."""
        atom = Atom(1, '1H')
        self.assertEqual(1, atom.id)
        self.assertEqual('1H', atom.alt_symbol)
        self.assertEqual('hydrogen', atom.name)
        self.assertEqual('H', atom.symbol)
        self.assertEqual([1.000, 1.000, 1.000], atom.color)
        self.assertEqual(1.00794000, atom.mass)
        self.assertEqual(1.2000, atom.radius)
        atom2 = Atom(2, 'Na')
        self.assertEqual(2, atom2.id)
        self.assertEqual('Na', atom2.alt_symbol)
        self.assertEqual('sodium', atom2.name)
        self.assertEqual('Na', atom2.symbol)
        self.assertEqual([0.671, 0.361, 0.949], atom2.color)
        self.assertEqual(22.98976928, atom2.mass)
        self.assertEqual(2.2700, atom2.radius)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
cjforman/pele
|
playground/molecule/test/molecule_test.py
|
Python
|
gpl-3.0
| 6,230
|
[
"PyMOL"
] |
d5062a2c4250a33bd0ffd9819c5434bed44b6dcf1f8a568c9ea888ccf2fc78f4
|
"""
This file contains a library of automated image processing effects that borrow
heavily from the simpler effects found in viztools.py
"""
from __future__ import print_function
from __future__ import division
from os import listdir
from os.path import join, isfile
import cv2
import numpy as np
from vidviz.effects import Effect
from vidviz.utils import SmoothNoise
from vidviz.utils import resize
class HueSwirlChain(Effect):
    """
    Create bloom effect around thresholded version of input frame then melt it
    with iteratively applying blur kernels. Iterative blur occurs
    automatically until reaching a stopping point defined in ITER_INDEX dict,
    then begins to transition to an unblurred version of a new image.
    KEYBOARD INPUTS:
        t - toggle between effect types
        w - toggle random walk
        a - toggle automatic behavior (vs keyboard input)
        -/+ - decrease/increase random matrix size
        [/] - decrease/increase bloom size
        ;/' - decrease/increase mask blur kernel
        ,/. - decrease/increase final masking offset value
        lr arrows - decrease/increase offset in background huespace
        / - reset parameters
        spacebar - quit hue-swirl-chain (transition to new input source)
    """
    def __init__(self, frame_height, frame_width):
        super(HueSwirlChain, self).__init__()
        self.name = 'hue-swirl-chain'
        # user option constants; each dict describes one tweakable parameter
        # ('val' is the live value, 'init'/'min'/'max'/'mod'/'step' bound it,
        # 'inc'/'dec' are the keyboard-input flags consumed by _process_io).
        DIM_SIZE = {
            'desc': 'dimension of background random matrix height/width',
            'name': 'dim_size',
            'val': 1,
            'init': 1,
            'min': 1,
            'max': 100,
            'mod': self.inf,
            'step': 1,
            'inc': False,
            'dec': False}
        BACKGROUND_BLUR_KERNEL = {
            'desc': 'kernel size for Gaussian blur that produces bloom',
            'name': 'back_blur',
            'val': 19,
            'init': 19,
            'min': 3,
            'max': 31,
            'mod': self.inf,
            'step': 2,
            'inc': False,
            'dec': False}
        MASK_BLUR_KERNEL = {
            'desc': 'kernel size for Gauss/med blur that acts on mask',
            'name': 'mask_blur',
            'val': 5,
            'init': 5,
            'min': 5,
            'max': 31,
            'mod': self.inf,
            'step': 2,
            'inc': False,
            'dec': False}
        FINAL_MASK_OFFSET = {
            'desc': 'mask is subtracted from this value before final masking',
            'name': 'mask_offset',
            'val': 235,
            'init': 235,
            'min': -self.inf,
            'max': self.inf,
            'mod': 256,
            'step': 5,
            'inc': False,
            'dec': False}
        ITER_INDEX = {
            'desc': '(no user input) index into blurring iterations',
            'name': 'iter_index',
            'val': 0,
            'init': 0,
            'min': 0,
            'max': 75,
            'mod': self.inf,
            'step': 1,
            'inc': False,
            'dec': False}
        HUE_OFFSET = {
            'desc': '(no user input) ' +
                    'hue value offset for background frame',
            'name': 'hue_offset',
            'val': 0,
            'init': 0,
            'min': -self.inf,
            'max': self.inf,
            'mod': 180,
            'step': 5,
            'inc': False,
            'dec': False}
        self.MAX_NUM_STYLES = 2
        # combine dicts into a list for easy general access
        self.props = [
            DIM_SIZE,
            BACKGROUND_BLUR_KERNEL,
            MASK_BLUR_KERNEL,
            FINAL_MASK_OFFSET,
            ITER_INDEX,
            HUE_OFFSET]
        # user options
        self.style = 0
        self.auto_play = True
        self.reinitialize = False
        self.random_walk = True
        self.chan_vec_pos = np.zeros((1, 1))
        self.noise = SmoothNoise(
            num_samples=10,
            num_channels=self.chan_vec_pos.size)
        self.frame_width = frame_width
        self.frame_height = frame_height
        # get source images
        # NOTE(review): hard-coded absolute path; breaks on any other machine.
        source_dir = \
            '/home/mattw/Dropbox/Dropbox/github/vid-viz/data/deep-dream/'
        self.file_list = [join(source_dir, f) for f in listdir(source_dir)
                          if isfile(join(source_dir, f))]
        self.num_files = len(self.file_list)
        # intialize other parameters
        self.reset()
    def reset(self):
        """Reset all effect state: parameters, source frames and mask caches."""
        # reset base class attributes
        super(HueSwirlChain, self).reset()
        self.prev_mask_blur = 0  # to initialize frame_mask_list
        self.prev_hue_offset = self.props[5]['init']
        self.prev_dim_size = self.props[0]['init']
        # background frame parameters
        self.frame_back_0 = np.ones(
            (self.props[0]['init'], self.props[0]['init'], 3))
        self.frame_back_0[:, :, 0] = \
            np.random.rand(self.props[0]['init'], self.props[0]['init'])
        self.frame_back = None
        self.auto_play = True
        # frame parameters
        self.curr_frame_index = 0
        self.file_index = 0
        frame_0 = cv2.imread(self.file_list[self.file_index])
        frame_0 = resize(frame_0, self.frame_width, self.frame_height)
        self.file_index += 1
        frame_1 = cv2.imread(self.file_list[self.file_index])
        frame_1 = resize(frame_1, self.frame_width, self.frame_height)
        self.file_index += 1
        self.frame = [frame_0, frame_1]
        # mask parameters: one list of cached blurred masks per source frame
        self.frame_masks = [None, None]
        self.frame_mask_list = [
            [None for _ in range(self.props[4]['max'] + 1)],
            [None for _ in range(self.props[4]['max'] + 1)]]
        self.num_blend_levels = self.props[4]['max']
        self.curr_blend_level = 0
        # control parameters
        self.increase_blur_index = True
        self.increase_blend_index = True
        self.increase_source_index = False
    def process(self, key_list, key_lock=False):
        """Produce the next output frame; *key_list* carries keyboard state."""
        # update if blur kernel toggled
        # if key_list[ord('t')]:
        #     reset_iter_seq = True
        # else:
        #     reset_iter_seq = False
        # control parameters (use _process_io for clipping and modding)
        if self.auto_play:
            # self.props[3]['val'] += 1  # final mask offset
            self.props[5]['val'] += 0.1  # hue offset
        # process keyboard input
        if not key_lock:
            self._process_io(key_list)
        if self.reinitialize:
            """TODO"""
            self.reinitialize = False
            self.chan_vec_pos = np.zeros((1, 1))
            self.noise.reinitialize()
            for index, _ in enumerate(self.props):
                self.props[index]['val'] = self.props[index]['init']
            # NOTE(review): this resets frame_mask_list to a FLAT list, but
            # reset() and the code below expect a list of two per-frame
            # lists; the later frame_mask_list[curr_frame_index][...] access
            # would misbehave after this branch runs — confirm intent.
            self.frame_mask_list = \
                [None for _ in range(self.props[4]['max'] + 1)]
            # NOTE(review): increase_index / increase_meta_index are not the
            # attribute names used elsewhere (increase_blur_index etc.).
            self.increase_index = True
            self.increase_meta_index = True
        # control parameters - blend
        if self.style == 0:
            # |_|
            # if increasing blur index, use blurred mask from original image
            # if decreasing blur index, use a linear combination of blurred
            # masks from original and new images
            if not self.increase_blur_index:
                # increase blend parameter
                self.curr_blend_level += 1
        elif self.style == 1:
            # |/
            pass
        elif self.style == 2:
            # \/
            pass
        # control parameters - blur
        if self.increase_blur_index:
            self.props[4]['val'] += 1
        else:
            self.props[4]['val'] -= 1
        if self.props[4]['val'] == self.props[4]['max']:
            self.increase_blur_index = False
        if self.props[4]['val'] == self.props[4]['min']:
            self.increase_blur_index = True
            reset_iter_seq = True
        else:
            reset_iter_seq = False
        # control parameters - source
        if reset_iter_seq:
            # reset part of mask list so new image can take over
            self.frame_mask_list[self.curr_frame_index] = \
                [None for _ in range(self.props[4]['max'] + 1)]
            # load new image
            temp_frame = cv2.imread(self.file_list[self.file_index])
            hw = temp_frame.shape
            temp_frame = cv2.getRectSubPix(
                temp_frame,
                (256, 256),
                (hw[1]/2, hw[0]/2))
            self.frame[self.curr_frame_index] = temp_frame
            self.frame[self.curr_frame_index] = resize(
                self.frame[self.curr_frame_index],
                self.frame_width,
                self.frame_height)
            self.file_index = (self.file_index + 1) % self.num_files
            # update curr frame index to old image
            self.curr_frame_index = (self.curr_frame_index + 1) % 2
            # reset blending param
            self.curr_blend_level = 0
        # human-readable names
        dim_size = self.props[0]['val']
        back_blur = self.props[1]['val']
        mask_blur = self.props[2]['val']
        final_offset = self.props[3]['val']
        iter_index = self.props[4]['val']
        hue_offset = self.props[5]['val']
        curr_fr_indx = self.curr_frame_index
        next_fr_indx = (curr_fr_indx + 1) % 2
        # create new random matrix if necessary
        # NOTE(review): `is not` compares int identity, not equality; this
        # only works because CPython caches small ints (values here are
        # bounded well below 256) — should be `!=`.
        if int(dim_size) is not int(self.prev_dim_size):
            self.prev_dim_size = dim_size
            self.frame_back_0 = np.ones((dim_size, dim_size, 3))
            self.frame_back_0[:, :, 2] = np.random.rand(dim_size, dim_size)
            self.frame_back = None
        # create background frame if necessary
        if self.frame_back is None:
            # get resized background
            self.frame_back = cv2.resize(
                self.frame_back_0,
                (self.frame_width, self.frame_height),
                interpolation=cv2.INTER_LINEAR)
            # scale channel 0 to OpenCV hue range [0, 179], others to [0, 255]
            self.frame_back[:, :, 0] = 179.0 * self.frame_back[:, :, 0]
            self.frame_back[:, :, 1:3] = 255.0 * self.frame_back[:, :, 1:3]
            self.frame_back = self.frame_back.astype('uint8')
            self.frame_back = cv2.cvtColor(self.frame_back, cv2.COLOR_HSV2BGR)
        # update background frame if necessary
        # NOTE(review): `is not` on ints again — see note above.
        if int(hue_offset) is not int(self.prev_hue_offset):
            self.frame_back = cv2.cvtColor(self.frame_back, cv2.COLOR_BGR2HSV)
            # uint8s don't play nice with subtraction
            self.frame_back[:, :, 0] += abs(
                int(hue_offset - self.prev_hue_offset))
            self.frame_back[:, :, 0] = np.mod(self.frame_back[:, :, 0],
                                              180)
            self.frame_back = cv2.cvtColor(self.frame_back, cv2.COLOR_HSV2BGR)
            self.prev_hue_offset = hue_offset
        # get mask if necessary
        if int(mask_blur) is not int(
                self.prev_mask_blur) or reset_iter_seq:
            # blur kernel changed; restart iteration sequence
            # self.props[4]['val'] = self.props[4]['init']
            # iter_index = self.props[4]['val']
            # self.increase_index = True
            # self.frame_mask_list = \
            #     [None for _ in range(self.props[4]['max'] + 1)]
            # get new mask: adaptive threshold of grayscale frame, then a
            # median blur to clean up speckle (done for both source frames)
            frame_gray = cv2.cvtColor(
                self.frame[curr_fr_indx],
                cv2.COLOR_BGR2GRAY)
            frame_gray = cv2.adaptiveThreshold(
                frame_gray,
                255,
                cv2.ADAPTIVE_THRESH_MEAN_C,
                cv2.THRESH_BINARY_INV,
                51,
                10)
            self.frame_mask_list[curr_fr_indx][0] = cv2.medianBlur(
                frame_gray,
                mask_blur)
            frame_gray = cv2.cvtColor(
                self.frame[next_fr_indx],
                cv2.COLOR_BGR2GRAY)
            frame_gray = cv2.adaptiveThreshold(
                frame_gray,
                255,
                cv2.ADAPTIVE_THRESH_MEAN_C,
                cv2.THRESH_BINARY_INV,
                51,
                10)
            self.frame_mask_list[next_fr_indx][0] = cv2.medianBlur(
                frame_gray,
                mask_blur)
            self.prev_mask_blur = mask_blur
        # update masks if necessary
        for fr_indx in range(2):
            if (self.frame_mask_list[fr_indx][iter_index]) is None \
                    and (iter_index % 2 == 0):
                # need to update and store frame mask
                # two blur passes from previously stored mask
                if self.style == 0:
                    frame_mask_temp = cv2.GaussianBlur(
                        self.frame_mask_list[fr_indx][iter_index - 2],
                        (mask_blur, mask_blur),
                        0)
                    frame_mask_temp = cv2.GaussianBlur(
                        frame_mask_temp,
                        (mask_blur, mask_blur),
                        0)
                elif self.style == 1:
                    frame_mask_temp = cv2.medianBlur(
                        self.frame_mask_list[fr_indx][iter_index - 2],
                        mask_blur)
                    frame_mask_temp = cv2.medianBlur(
                        frame_mask_temp,
                        mask_blur)
                self.frame_mask_list[fr_indx][iter_index] = frame_mask_temp
                self.frame_masks[fr_indx] = frame_mask_temp
            elif (self.frame_mask_list[fr_indx][iter_index] is None) and \
                    (iter_index % 2 == 1):
                # need to update but not store frame mask
                if self.style == 0:
                    self.frame_masks[fr_indx] = cv2.GaussianBlur(
                        self.frame_mask_list[fr_indx][iter_index - 1],
                        (mask_blur, mask_blur),
                        0)
                elif self.style == 1:
                    self.frame_masks[fr_indx] = cv2.medianBlur(
                        self.frame_mask_list[fr_indx][iter_index - 1],
                        mask_blur)
        # combine masks
        # NOTE(review): `is not 0` — int identity comparison; should be `!= 0`.
        if self.curr_blend_level is not 0:
            # blend masks
            frame_mask = cv2.addWeighted(
                self.frame_masks[curr_fr_indx],
                1.0 - self.curr_blend_level / self.num_blend_levels,
                self.frame_masks[next_fr_indx],
                self.curr_blend_level / self.num_blend_levels,
                0)
        else:
            frame_mask = self.frame_masks[curr_fr_indx]
        # get masked then blurred background
        frame_back_blurred = np.zeros(self.frame_back.shape, dtype='uint8')
        for chan in range(3):
            frame_back_blurred[:, :, chan] = cv2.bitwise_and(
                self.frame_back[:, :, chan],
                frame_mask)
        frame_back_blurred = cv2.GaussianBlur(
            frame_back_blurred,
            (back_blur, back_blur),
            0)
        # remask blurred background
        frame = np.zeros(self.frame_back.shape, dtype='uint8')
        for chan in range(3):
            frame[:, :, chan] = cv2.bitwise_and(
                frame_back_blurred[:, :, chan],
                final_offset - frame_mask)
        # _, frame = cv2.threshold(frame, 32, 255, cv2.THRESH_BINARY)
        return frame
    def print_update(self, force=False):
        """Print effect settings to console if not changed automatically"""
        if (self.update_output != 4) and (self.update_output != 5):
            super(HueSwirlChain, self).print_update(force=force)
class Ball(object):
    """A single bouncing circle used by the BouncingBalls effect."""

    def __init__(self, radius, center, velocity, frame_size):
        # center, velocity and frame_size are mutable sequences indexed
        # per axis.
        self.rad = radius
        self.pos = center
        self.vel = velocity
        self.frame_size = frame_size

    def update_position(self):
        """Advance one time step, reflecting velocity at the frame borders."""
        # NOTE(review): iterates three components, although draw() only uses
        # pos[0]/pos[1] — confirm the third axis is intentional.
        for axis in range(3):
            self.pos[axis] += self.vel[axis]
            if self.pos[axis] + self.rad >= self.frame_size[axis]:
                # clamp to the far border and bounce back
                self.pos[axis] = self.frame_size[axis] - self.rad
                self.vel[axis] *= -1
            elif self.pos[axis] - self.rad <= 0:
                # clamp to the near border and bounce back
                self.pos[axis] = self.rad
                self.vel[axis] *= -1

    def draw(self, frame):
        """Paint the ball onto *frame* as a filled white circle."""
        # cv2.circle expects (x, y); pos is stored (row, col), hence the swap.
        cv2.circle(
            img=frame, center=(self.pos[1], self.pos[0]), radius=self.rad,
            color=(255, 255, 255), thickness=-1)
class BouncingBalls(Effect):
    """
    Circles that bounce around in frame

    KEYBOARD INPUTS:
        t - toggle between effect types
        w - toggle random walk [currently not used]
        a - toggle automatic behavior (vs keyboard input) [currently not used]
        -/+ - decrease/increase number of balls
        [/] - decrease/increase mean of ball radius (drawn from distribution)
        ;/' - None
        ,/. - None
        lr arrows - None
        / - reset parameters
        spacebar - quit bouncing-balls (transition to new input source)
    """

    def __init__(self, frame_height, frame_width):
        super(BouncingBalls, self).__init__()
        self.name = 'bouncing-balls'
        # user option constants
        # NOTE(review): these prop dicts appear to be consumed by the base
        # Effect class (via 'inc'/'dec'/'mod'/'step' keys in _process_io) --
        # confirm against the base class before changing the key set.
        NUM_BALLS = {
            'desc': 'number of balls',
            'name': 'num_balls',
            'val': 4,
            'init': 4,
            'min': 1,
            'max': 32,
            'mod': self.inf,
            'step': 1,
            'inc': False,
            'dec': False}
        BALL_RADIUS = {
            'desc': 'ball radius in pixels',
            'name': 'ball_radius',
            'val': 100,
            'init': 100,
            'min': 50,
            'max': 500,
            'mod': self.inf,
            'step': 10,
            'inc': False,
            'dec': False}
        self.MAX_NUM_STYLES = 2  #
        # combine dicts into a list for easy general access
        self.props = [
            NUM_BALLS,
            BALL_RADIUS,
            self.none_dict,
            self.none_dict,
            self.none_dict,
            self.none_dict]
        # user options
        self.style = 0
        self.auto_play = True
        self.reinitialize = False
        self.random_walk = False
        self.chan_vec_pos = np.zeros((1, 1))
        self.noise = SmoothNoise(
            num_samples=10,
            num_channels=self.chan_vec_pos.size)
        self.frame_width = frame_width
        self.frame_height = frame_height
        # initialize other parameters
        self.reset()

    def reset(self):
        # reset base class attributes
        super(BouncingBalls, self).reset()
        self.auto_play = False
        # ball parameters
        # NOTE(review): radii are drawn from a hard-coded randint(50, 150)
        # range rather than the BALL_RADIUS prop above -- confirm whether the
        # prop is meant to drive this.
        self.balls = []
        for _ in range(self.props[0]['val']):
            ball_rad = np.random.randint(50, 150)
            # keep the whole ball strictly inside the frame at spawn time
            lower_height = ball_rad + 1
            upper_height = self.frame_height - ball_rad - 1
            lower_width = ball_rad + 1
            upper_width = self.frame_width - ball_rad - 1
            ball_pos = [
                np.random.randint(lower_height, upper_height),
                np.random.randint(lower_width, upper_width),
                0]
            ball_vel = [np.random.randint(-8, 8), np.random.randint(-8, 8), 0]
            frame_size = [self.frame_height, self.frame_width, 0]
            self.balls.append(Ball(
                radius=ball_rad, center=ball_pos, velocity=ball_vel,
                frame_size=frame_size))

    def process(self, key_list, key_lock=False):
        # process keyboard input
        if not key_lock:
            self._process_io(key_list)
        if self.reinitialize:
            self.reinitialize = False
            for index, _ in enumerate(self.props):
                self.props[index]['val'] = self.props[index]['init']
        # human-readable names
        # num_balls = self.props[0]['val']
        # ball_radius = self.props[1]['val']
        # draw circles on black background (single-channel uint8 frame)
        frame = np.zeros(
            shape=(self.frame_height, self.frame_width), dtype='uint8')
        for ball in self.balls:
            ball.update_position()
            ball.draw(frame)
        return frame

    def print_update(self, force=False):
        """Print effect settings to console if not changed automatically"""
        super(BouncingBalls, self).print_update(force=force)
|
themattinthehatt/vid-viz
|
vidviz/auto.py
|
Python
|
mit
| 20,553
|
[
"Gaussian"
] |
437a54b52ef6859d6e8c106a6dc4622b71884fd282d2ede8f6ab9c1b3ad1dceb
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Generic output file parser and related tools"""
import bz2
import fileinput
import gzip
import inspect
import io
import logging
import os
import random
import sys
import zipfile
from abc import ABC, abstractmethod
import numpy
from cclib.parser import utils
from cclib.parser.data import ccData
from cclib.parser.data import ccData_optdone_bool
# This seems to avoid a problem with Avogadro.
# (Disables collection of multiprocessing info in logging records.)
logging.logMultiprocessing = 0
class myBZ2File(bz2.BZF2ile if False else bz2.BZ2File):
    """Return string instead of bytes"""

    def __next__(self):
        # The super() target is bz2.BZ2File itself, so attribute lookup starts
        # above BZ2File in the MRO; the raw line comes back as bytes.
        raw = super(bz2.BZ2File, self).__next__()
        return raw.decode("ascii", "replace")

    def next(self):
        # Python 2 style iteration alias.
        return self.__next__()
class myGzipFile(gzip.GzipFile):
    """Return string instead of bytes"""

    def __next__(self):
        parent = super(gzip.GzipFile, self)
        # seemingly different versions of gzip can have either next or __next__
        advance = parent.next if hasattr(parent, 'next') else parent.__next__
        return advance().decode("ascii", "replace")

    def next(self):
        # Python 2 style iteration alias.
        return self.__next__()
class FileWrapper:
    """Wrap a file-like object or stream with position/size bookkeeping.

    Parameters
    ----------
    source : file-like object or stream
        must support at least iteration; seek/tell are used when available
    pos : int
        assumed initial position within the stream
    """

    def __init__(self, source, pos=0):
        self.src = source

        # Most file-like objects have seek and tell methods, but streams returned
        # by urllib.urlopen in Python2 do not, which will raise an AttributeError
        # in this code. On the other hand, in Python3 these methods do exist since
        # urllib uses the stream class in the io library, but they raise a different
        # error, namely io.UnsupportedOperation. That is why it is hard to be more
        # specific with except block here.
        try:
            self.src.seek(0, 2)
            self.size = self.src.tell()
            self.src.seek(pos, 0)
        except (AttributeError, IOError, io.UnsupportedOperation):
            # Stream returned by urllib should have size information.
            if hasattr(self.src, 'headers') and 'content-length' in self.src.headers:
                self.size = int(self.src.headers['content-length'])
            else:
                self.size = pos

        # Assume the position is what was passed to the constructor.
        self.pos = pos
        self.last_line = None

    def next(self):
        """Return the next line and advance the position bookkeeping."""
        line = next(self.src)
        self.pos += len(line)
        self.last_line = line
        return line

    def __next__(self):
        return self.next()

    def __iter__(self):
        return self

    def close(self):
        self.src.close()

    def seek(self, pos, ref):
        """Seek to pos relative to ref (0 = start, 1 = current, 2 = end)."""
        # If we are seeking to end, we can usually emulate it by reading the
        # stream to exhaustion. The except clause stays deliberately broad
        # (many stream types raise different errors here), but is no longer a
        # bare except, so KeyboardInterrupt and SystemExit now propagate.
        try:
            self.src.seek(pos, ref)
        except Exception:
            if ref == 2:
                self.src.read()
            else:
                raise

        # Keep our own notion of the position in sync with the request.
        if ref == 0:
            self.pos = pos
        if ref == 1:
            self.pos += pos
        if ref == 2 and hasattr(self, 'size'):
            self.pos = self.size
def openlogfile(filename, object=None):
    """Return a file object given a filename or if object specified decompresses it
    if needed and wrap it up.

    Given the filename or file object of a log file or a gzipped, zipped, or bzipped
    log file, this function returns a file-like object.

    Given a list of filenames, this function returns a FileInput object,
    which can be used for seamless iteration without concatenation.
    """

    # If there is a single string argument given.
    # (Was `type(filename) in [str, str]` -- a Python 2 leftover with a
    # duplicated element; isinstance also accepts str subclasses.)
    if isinstance(filename, str):

        extension = os.path.splitext(filename)[1]

        if extension == ".gz":
            fileobject = myGzipFile(filename, "r", fileobj=object)

        elif extension == ".zip":
            # Renamed from `zip` so the builtin is not shadowed.
            zipf = zipfile.ZipFile(object, "r") if object else zipfile.ZipFile(filename, "r")
            assert len(zipf.namelist()) == 1, "ERROR: Zip file contains more than 1 file"
            fileobject = io.StringIO(zipf.read(zipf.namelist()[0]).decode("ascii", "ignore"))

        elif extension in ['.bz', '.bz2']:
            # Module 'bz2' is not always importable.
            assert bz2 is not None, "ERROR: module bz2 cannot be imported"
            fileobject = myBZ2File(object, "r") if object else myBZ2File(filename, "r")

        else:
            # Assuming that object is text file encoded in utf-8
            fileobject = io.StringIO(object.decode('utf-8')) if object \
                else FileWrapper(io.open(filename, "r", errors='ignore'))

        return fileobject

    elif hasattr(filename, "__iter__"):

        # This is needed, because fileinput will assume stdin when filename is empty.
        if len(filename) == 0:
            return None

        return fileinput.input(filename, openhook=fileinput.hook_compressed)
class Logfile(ABC):
    """Abstract class for logfile objects.

    Subclasses defined by cclib:
        ADF, DALTON, GAMESS, GAMESSUK, Gaussian, Jaguar, Molpro, MOPAC,
        NWChem, ORCA, Psi, Q-Chem
    """

    def __init__(self, source, loglevel=logging.ERROR, logname="Log",
                 logstream=sys.stderr, datatype=ccData_optdone_bool, **kwds):
        """Initialise the Logfile object.

        This should be called by a subclass in its own __init__ method.

        Inputs:
            source - a logfile, list of logfiles, or stream with at least a read method
            loglevel - integer corresponding to a log level from the logging module
            logname - name of the source logfile passed to this constructor
            logstream - where to output the logging information
            datatype - class to use for gathering data attributes
        """

        # Set the filename to source if it is a string or a list of strings, which are
        # assumed to be filenames. Otherwise, assume the source is a file-like object
        # if it has a read method, and we will try to use it like a stream.
        self.isfileinput = False
        if isinstance(source, str):
            self.filename = source
            self.isstream = False
        elif isinstance(source, list) and all([isinstance(s, str) for s in source]):
            self.filename = source
            self.isstream = False
        elif isinstance(source, fileinput.FileInput):
            self.filename = source
            self.isstream = False
            self.isfileinput = True
        elif hasattr(source, "read"):
            self.filename = "stream %s" % str(type(source))
            self.isstream = True
            self.stream = source
        else:
            raise ValueError("Unexpected source type.")

        # Set up the logger.
        # Note that calling logging.getLogger() with one name always returns the same instance.
        # Presently in cclib, all parser instances of the same class use the same logger,
        # which means that care needs to be taken not to duplicate handlers.
        self.loglevel = loglevel
        self.logname = logname
        self.logger = logging.getLogger('%s %s' % (self.logname, self.filename))
        self.logger.setLevel(self.loglevel)
        if len(self.logger.handlers) == 0:
            handler = logging.StreamHandler(logstream)
            handler.setFormatter(logging.Formatter("[%(name)s %(levelname)s] %(message)s"))
            self.logger.addHandler(handler)

        # Set up the metadata.
        if not hasattr(self, "metadata"):
            self.metadata = {}
            self.metadata["package"] = self.logname
            self.metadata["methods"] = []
            # Indicate if the computation has completed successfully
            self.metadata['success'] = False

        # Periodic table of elements.
        self.table = utils.PeriodicTable()

        # This is the class that will be used in the data object returned by parse(), and should
        # normally be ccData or a subclass of it.
        self.datatype = datatype

        # Change the class used if we want optdone to be a list or if the 'future' option
        # is used, which might have more consequences in the future.
        optdone_as_list = kwds.get("optdone_as_list", False) or kwds.get("future", False)
        # Coerce any non-boolean value (e.g. a string) back to the default.
        optdone_as_list = optdone_as_list if isinstance(optdone_as_list, bool) else False
        if optdone_as_list:
            self.datatype = ccData

        # Parsing of Natural Orbitals and Natural Spin Orbitals into one attribute
        self.unified_no_nso = kwds.get("future",False)

    def __setattr__(self, name, value):
        """Intercept attribute assignment to log creation of cclib attributes."""

        # Send info to logger if the attribute is in the list of attributes.
        if name in ccData._attrlist and hasattr(self, "logger"):

            # Call logger.info() only if the attribute is new.
            if not hasattr(self, name):
                if type(value) in [numpy.ndarray, list]:
                    self.logger.info("Creating attribute %s[]" % name)
                else:
                    self.logger.info("Creating attribute %s: %s" % (name, str(value)))

        # Set the attribute.
        object.__setattr__(self, name, value)

    def parse(self, progress=None, fupdate=0.05, cupdate=0.002):
        """Parse the logfile, using the assumed extract method of the child."""

        # Check that the sub-class has an extract attribute,
        # that is callable with the proper number of arguments.
        if not hasattr(self, "extract"):
            raise AttributeError("Class %s has no extract() method." % self.__class__.__name__)
        if not callable(self.extract):
            raise AttributeError("Method %s._extract not callable." % self.__class__.__name__)
        if len(inspect.getfullargspec(self.extract)[0]) != 3:
            raise AttributeError("Method %s._extract takes wrong number of arguments." % self.__class__.__name__)

        # Save the current list of attributes to keep after parsing.
        # The dict of self should be the same after parsing.
        _nodelete = list(set(self.__dict__.keys()))

        # Initiate the FileInput object for the input files.
        # Remember that self.filename can be a list of files.
        if not self.isstream:
            if not self.isfileinput:
                inputfile = openlogfile(self.filename)
            else:
                inputfile = self.filename
        else:
            inputfile = FileWrapper(self.stream)

        # Intialize self.progress
        # (Compressed files do not expose a reliable size, so skip progress.)
        is_compressed = isinstance(inputfile, myGzipFile) or isinstance(inputfile, myBZ2File)
        if progress and not (is_compressed):
            self.progress = progress
            self.progress.initialize(inputfile.size)
            self.progress.step = 0
            self.fupdate = fupdate
            self.cupdate = cupdate

        # Maybe the sub-class has something to do before parsing.
        self.before_parsing()

        # Loop over lines in the file object and call extract().
        # This is where the actual parsing is done.
        for line in inputfile:

            self.updateprogress(inputfile, "Unsupported information", cupdate)

            # This call should check if the line begins a section of extracted data.
            # If it does, it parses some lines and sets the relevant attributes (to self).
            # Any attributes can be freely set and used across calls, however only those
            # in data._attrlist will be moved to final data object that is returned.
            try:
                self.extract(inputfile, line)
            except StopIteration:
                self.logger.error("Unexpectedly encountered end of logfile.")
                break
            except Exception as e:
                # NOTE(review): 'e' is unused; logger.exception(...) would also
                # record the traceback before the bare re-raise below.
                self.logger.error("Encountered error when parsing.")
                self.logger.error("Last line read: %s" % inputfile.last_line)
                raise

        # Close input file object.
        if not self.isstream:
            inputfile.close()

        # Maybe the sub-class has something to do after parsing.
        self.after_parsing()

        # If atomcoords were not parsed, but some input coordinates were ("inputcoords").
        # This is originally from the Gaussian parser, a regression fix.
        if not hasattr(self, "atomcoords") and hasattr(self, "inputcoords"):
            self.atomcoords = numpy.array(self.inputcoords, 'd')

        # Set nmo if not set already - to nbasis.
        if not hasattr(self, "nmo") and hasattr(self, "nbasis"):
            self.nmo = self.nbasis

        # Create a default coreelectrons array, unless it's impossible
        # to determine.
        if not hasattr(self, "coreelectrons") and hasattr(self, "natom"):
            self.coreelectrons = numpy.zeros(self.natom, "i")
        if hasattr(self, "incorrect_coreelectrons"):
            self.__delattr__("coreelectrons")

        # Create the data object we want to return. This is normally ccData, but can be changed
        # by passing the datatype argument to the constructor. All supported cclib attributes
        # are copied to this object, but beware that in order to be moved an attribute must be
        # included in the data._attrlist of ccData (or whatever else).
        # There is the possibility of passing additional arguments via self.data_args, but
        # we use this sparingly in cases where we want to limit the API with options, etc.
        data = self.datatype(attributes=self.__dict__)

        # Now make sure that the cclib attributes in the data object are all the correct type,
        # including arrays and lists of arrays.
        data.arrayify()

        # Delete all temporary attributes (including cclib attributes).
        # All attributes should have been moved to a data object, which will be returned.
        for attr in list(self.__dict__.keys()):
            if not attr in _nodelete:
                self.__delattr__(attr)

        # Perform final checks on values of attributes.
        data.check_values(logger=self.logger)

        # Update self.progress as done.
        if hasattr(self, "progress"):
            self.progress.update(inputfile.size, "Done")

        return data

    def before_parsing(self):
        """Set parser-specific variables and do other initial things here."""
        pass

    def after_parsing(self):
        """Correct data or do parser-specific validation after parsing is finished."""
        pass

    def updateprogress(self, inputfile, msg, xupdate=0.05):
        """Update progress."""
        # Random sampling keeps progress updates cheap on large files.
        if hasattr(self, "progress") and random.random() < xupdate:
            newstep = inputfile.pos
            if newstep != self.progress.step:
                self.progress.update(newstep, msg)
                self.progress.step = newstep

    @abstractmethod
    def normalisesym(self, symlabel):
        """Standardise the symmetry labels between parsers."""

    def new_internal_job(self):
        """Delete attributes that can be problematic in multistep jobs.

        TODO: instead of this hack, parse each job in a multistep computation
        as a different ccData object (this is for 2.x).

        Some computations are actually sequences of several jobs, and some
        attributes won't work well if parsed across jobs. There include:
            mpenergies: if different jobs go to different orders then
                        these won't be consistent and can't be converted
                        to an array easily
        """
        for name in ("mpenergies",):
            if hasattr(self, name):
                delattr(self, name)

    def set_attribute(self, name, value, check_change=True):
        """Set an attribute and perform an optional check when it already exists.

        Note that this can be used for scalars and lists alike, whenever we want
        to set a value for an attribute.

        Parameters
        ----------
        name: str
            The name of the attribute.
        value: str
            The value for the attribute.
        check_change: bool
            By default we want to check that the value does not change
            if the attribute already exists.
        """
        if check_change and hasattr(self, name):
            try:
                numpy.testing.assert_equal(getattr(self, name), value)
            except AssertionError:
                # A change is only logged, not rejected; the new value wins.
                self.logger.warning("Attribute %s changed value (%s -> %s)" % (name, getattr(self, name), value))
        setattr(self, name, value)

    def append_attribute(self, name, value):
        """Appends a value to an attribute."""
        if not hasattr(self, name):
            self.set_attribute(name, [])
        getattr(self, name).append(value)

    def extend_attribute(self, name, values):
        """Appends an iterable of values to an attribute."""
        if not hasattr(self, name):
            self.set_attribute(name, [])
        getattr(self, name).extend(values)

    def _assign_coreelectrons_to_element(self, element, ncore,
                                         ncore_is_total_count=False):
        """Assign core electrons to all instances of the element.

        It's usually reasonable to do this for all atoms of a given element,
        because mixed usage isn't normally allowed within elements.

        Parameters
        ----------
        element: str
            the chemical element to set coreelectrons for
        ncore: int
            the number of core electrons
        ncore_is_total_count: bool
            whether the ncore argument is the total count, in which case it is
            divided by the number of atoms of this element
        """
        atomsymbols = [self.table.element[atomno] for atomno in self.atomnos]
        indices = [i for i, el in enumerate(atomsymbols) if el == element]
        if ncore_is_total_count:
            ncore = ncore // len(indices)

        if not hasattr(self, 'coreelectrons'):
            self.coreelectrons = numpy.zeros(self.natom, 'i')
        self.coreelectrons[indices] = ncore

    def skip_lines(self, inputfile, sequence):
        """Read trivial line types and check they are what they are supposed to be.

        This function will read len(sequence) lines and do certain checks on them,
        when the elements of sequence have the appropriate values. Currently the
        following elements trigger checks:
            'blank' or 'b' - the line should be blank
            'dashes' or 'd' - the line should contain only dashes (or spaces)
            'equals' or 'e' - the line should contain only equal signs (or spaces)
            'stars' or 's' - the line should contain only stars (or spaces)
        """

        expected_characters = {
            '-': ['dashes', 'd'],
            '=': ['equals', 'e'],
            '*': ['stars', 's'],
        }

        lines = []
        for expected in sequence:

            # Read the line we want to skip.
            line = next(inputfile)

            # Blank lines are perhaps the most common thing we want to check for.
            if expected in ["blank", "b"]:
                try:
                    assert line.strip() == ""
                except AssertionError:
                    # Report the caller's file/line so the warning points at the parser.
                    frame, fname, lno, funcname, funcline, index = inspect.getouterframes(inspect.currentframe())[1]
                    parser = fname.split('/')[-1]
                    msg = "In %s, line %i, line not blank as expected: %s" % (parser, lno, line.strip())
                    self.logger.warning(msg)

            # All cases of heterogeneous lines can be dealt with by the same code.
            for character, keys in expected_characters.items():
                if expected in keys:
                    try:
                        assert utils.str_contains_only(line.strip(), [character, ' '])
                    except AssertionError:
                        frame, fname, lno, funcname, funcline, index = inspect.getouterframes(inspect.currentframe())[1]
                        parser = fname.split('/')[-1]
                        msg = "In %s, line %i, line not all %s as expected: %s" % (parser, lno, keys[0], line.strip())
                        self.logger.warning(msg)
                    continue

            # Save the skipped line, and we will return the whole list.
            lines.append(line)

        return lines

    # Convenience wrapper: skip exactly one line.
    skip_line = lambda self, inputfile, expected: self.skip_lines(inputfile, [expected])
|
cclib/cclib
|
cclib/parser/logfileparser.py
|
Python
|
bsd-3-clause
| 20,759
|
[
"ADF",
"Avogadro",
"Dalton",
"GAMESS",
"Gaussian",
"Jaguar",
"MOPAC",
"Molpro",
"NWChem",
"ORCA",
"Q-Chem",
"cclib"
] |
9052c0a2f45d240615d46bf4a072dc0148cda5ea4a9d19db15ad97a325dbfdf6
|
#
# Copyright (C) 2013,2014,2015,2016 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import espressomd
import numpy
import unittest as ut
from tests_common import *
class InteractionsBondedTest(ut.TestCase):
    # Shared ESPResSo system; box_l is overwritten per-test in setUp.
    system = espressomd.System(box_l=[1.0, 1.0, 1.0])

    box_l = 10.

    # Random start position and a random unit direction; particle 1 is
    # displaced along `axis` in increments of length `step_width` (0.01).
    start_pos = numpy.random.rand(3) * box_l
    axis = numpy.random.rand(3)
    axis /= numpy.linalg.norm(axis)
    step = axis * 0.01
    step_width = numpy.linalg.norm(step)

    def setUp(self):
        self.system.box_l = [self.box_l] * 3
        self.system.cell_system.skin = 0.4
        self.system.time_step = 1.

        # Both particles start at the same position; separation grows as
        # particle 1 is stepped along `axis` in the tests.
        self.system.part.add(id=0, pos=self.start_pos, type=0)
        self.system.part.add(id=1, pos=self.start_pos, type=0)

    def tearDown(self):
        self.system.part.clear()

    # Required, since assertAlmostEqual does NOT check significant places
    def assertFractionAlmostEqual(self, a, b, places=10):
        # For near-zero references compare absolutely; otherwise compare the ratio.
        if abs(b) < 1E-8:
            self.assertAlmostEqual(a, b)
        else:
            self.assertAlmostEqual(a / b, 1.)

    def assertItemsFractionAlmostEqual(self, a, b):
        # Element-wise fractional comparison of two vectors.
        for i, ai in enumerate(a):
            self.assertFractionAlmostEqual(ai, b[i])

    # Test Harmonic Bond
    def test_harmonic(self):
        hb_k = 5
        hb_r_0 = 1.5
        hb_r_cut = 3.355

        hb = espressomd.interactions.HarmonicBond(
            k=hb_k, r_0=hb_r_0, r_cut=hb_r_cut)
        self.system.bonded_inter.add(hb)
        self.system.part[0].add_bond((hb, 1))

        # 335 steps of 0.01 keep the separation (3.35) just below r_cut = 3.355.
        for i in range(335):
            self.system.part[1].pos = self.system.part[1].pos + self.step
            # steps=0 with recalc_forces recomputes forces/energies without moving.
            self.system.integrator.run(recalc_forces=True, steps=0)

            # Calculate energies
            E_sim = self.system.analysis.energy()["bonded"]
            E_ref = harmonic_potential(
                scalar_r=(i + 1) * self.step_width, k=hb_k, r_0=hb_r_0, r_cut=hb_r_cut)

            # Calculate forces
            f0_sim = self.system.part[0].f
            f1_sim = self.system.part[1].f
            f1_ref = self.axis * \
                harmonic_force(scalar_r=(i + 1) * self.step_width,
                               k=hb_k, r_0=hb_r_0, r_cut=hb_r_cut)

            # Check that energies match, ...
            self.assertFractionAlmostEqual(E_sim, E_ref)
            # force equals minus the counter-force ...
            self.assertTrue((f0_sim == -f1_sim).all())
            # and has correct value.
            self.assertItemsFractionAlmostEqual(f1_sim, f1_ref)

        # Check that bond breaks when distance > r_cut
        self.system.part[1].pos = self.system.part[1].pos + self.step
        # NOTE(review): assertRaisesRegexp is deprecated (assertRaisesRegex);
        # the pattern (including the "Encoutered" spelling) must match the
        # engine's actual error message -- confirm before changing either side.
        with self.assertRaisesRegexp(Exception, "Encoutered errors during integrate"):
            self.system.integrator.run(recalc_forces=True, steps=0)

    # Test Fene Bond
    def test_fene(self):
        fene_k = 23.15
        fene_d_r_max = 3.355
        fene_r_0 = 1.1

        fene = espressomd.interactions.FeneBond(
            k=fene_k, d_r_max=fene_d_r_max, r_0=fene_r_0)
        self.system.bonded_inter.add(fene)
        self.system.part[0].add_bond((fene, 1))

        # 445 steps of 0.01 keep the separation (4.45) just below the FENE
        # divergence at r_0 + d_r_max = 4.455.
        for i in range(445):
            self.system.part[1].pos = self.system.part[1].pos + self.step
            self.system.integrator.run(recalc_forces=True, steps=0)

            # Calculate energies
            E_sim = self.system.analysis.energy()["bonded"]
            E_ref = fene_potential(
                scalar_r=(i + 1) * self.step_width, k=fene_k, d_r_max=fene_d_r_max, r_0=fene_r_0)

            # Calculate forces
            f0_sim = self.system.part[0].f
            f1_sim = self.system.part[1].f
            f1_ref = self.axis * \
                fene_force(scalar_r=(i + 1) * self.step_width,
                           k=fene_k, d_r_max=fene_d_r_max, r_0=fene_r_0)

            # Check that energies match, ...
            self.assertFractionAlmostEqual(E_sim, E_ref)
            # force equals minus the counter-force ...
            self.assertTrue((f0_sim == -f1_sim).all())
            # and has correct value.
            self.assertItemsFractionAlmostEqual(f1_sim, f1_ref)

        # Check that bond breaks when distance > r_cut
        self.system.part[1].pos = self.system.part[1].pos + self.step
        with self.assertRaisesRegexp(Exception, "Encoutered errors during integrate"):
            self.system.integrator.run(recalc_forces=True, steps=0)
# Run the test suite directly, first reporting the compiled-in ESPResSo features.
if __name__ == '__main__':
    print("Features: ", espressomd.features())
    ut.main()
|
KonradBreitsprecher/espresso
|
testsuite/interactions_bonded.py
|
Python
|
gpl-3.0
| 5,159
|
[
"ESPResSo"
] |
7044a2031c324d19d4ba3f31244a0ad155a691ae9e6d7099d36bd39a0ebee4b5
|
# Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nucleator.cli.command import Command
from nucleator.cli import properties
from nucleator.cli import utils
from nucleator.cli import unbuffered_subprocess as usp
import os, subprocess, uuid
import yaml
class Update(Command):
    """The nucleator 'update' CLI command.

    NOTE: this module uses Python 2 'except Exception, e' syntax throughout
    and therefore cannot run under Python 3 as written.
    """

    name = "update"

    def parser_init(self, subparsers):
        """
        Initialize parsers for this command.
        """
        init_parser = subparsers.add_parser('update')
        init_parser.add_argument("--version", required=False, help="Show versions of components", action='store_true')

    def update(self, **kwargs):
        """
        The update command:
         - Pulls and installs Nucleator Cage and Stackset modules to contrib dir in
           Nucleator configuration directory, as specified in manifest
         - Recursively pulls dependent modules specified in module dependencies for
           each module in manifest
        """
        if kwargs.get("version"):
            self.show_versions()
            return
        self.update_sources(**kwargs)
        self.update_roles(**kwargs)
        utils.write("SUCCESS - successfully updated nucleator sources and ansible roles, placed in {0}\n\n".format(properties.contrib_path()))

    def write_versions(self):
        """
        show version of each Nucleator stackset specified in sources.yml
        pull each one into ~/.nucleator/contrib/
        """
        sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
        stream = open(sources, 'r')
        # NOTE(review): yaml.load without an explicit Loader is unsafe for
        # untrusted input -- yaml.safe_load would be preferable here.
        slist = yaml.load(stream)
        stream.close()
        path = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "contrib")
        for sname in slist:
            # sname['name'] == "siteconfig":
            # sname['src']
            # sname['version']
            # git ls-remote http://www.kernel.org/pub/scm/git/git.git master
            src = sname['src']
            # strip the ansible-galaxy style "git+" scheme prefix for git itself
            if src.startswith("git+"):
                src = src[4:]
            update_command = [ "git", "ls-remote", src ]
            if 'version' in sname:
                update_command.append(sname['version'])
            utils.write(" ".join(update_command) + "\n")
            os.environ["PYTHONUNBUFFERED"]="1"
            update_process=usp.Popen(
                update_command,
                shell=False,
                stdout=usp.PIPE,
                stderr=usp.PIPE
            )
            update_out, update_err = update_process.communicate()
            update_rc = update_process.returncode
            if update_rc == 0:
                # Record provenance (remote, branch, last commit) alongside the
                # installed stackset for later `--version` reporting.
                vers_location = os.path.join(path, sname['name'], "__version__")
                with open(vers_location, 'w') as f:
                    f.write("Remote Location: "+sname['src']+"\n")
                    f.write("Remote Branch: ")
                    f.write("(not specified)" if 'version' not in sname else sname['version'])
                    commit = update_out.split(" ")
                    f.write("\nLast Commit: "+commit[0]+"\n")

    def show_versions(self):
        """
        show version of each Nucleator stackset specified in sources.yml
        pull each one into ~/.nucleator/contrib/
        """
        from nucleator import __version__
        utils.write("Your Nucleator core is version "+__version__+"\n\n")
        path = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "contrib")
        # iterate through nucleator command definitions found as immediate subdirs of path
        for command_dir in next(os.walk(path))[1]:
            candidate_location = os.path.join(path, command_dir, "__version__")
            if os.path.isfile(candidate_location):
                with open(candidate_location, 'r') as f:
                    read_data = f.read()
                utils.write("Version of "+command_dir+"\n"+read_data)
        return 0

    def update_sources(self, **kwargs):
        """
        update Nucleator stacksets specified in sources.yml
        pull each one into ~/.nucleator/contrib/
        """
        sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
        utils.write("\nUpdating nucleator commands from sources in {0}\n".format(sources))
        try:
            # install into a uniquely-named temp dir first, then swap into place
            roles_path_tmp=os.path.join(properties.NUCLEATOR_CONFIG_DIR, "-".join( [ "contrib", str(uuid.uuid4()) ] ))
            update_command = [
                "ansible-galaxy", "install",
                "--force",
                "--role-file", sources,
                "--roles-path", roles_path_tmp,
            ]
            utils.write(" ".join(update_command) + "\n")
            os.environ["PYTHONUNBUFFERED"]="1"
            update_process=usp.Popen(
                update_command,
                shell=False,
                stdout=usp.PIPE,
                stderr=usp.PIPE
            )
            update_out, update_err = update_process.communicate()
            update_rc = update_process.returncode
        except Exception, e:
            utils.write_err("Exception while updating nucleator commands from specified sources:\n{0}".format(e), False)
            raise e

        # move new contrib stacksets into place
        utils.write("\nMoving updated nucleator commands into place\n")
        try:
            # test for existence of config dir
            roles_path=os.path.join(properties.NUCLEATOR_CONFIG_DIR, "contrib")
            if not os.path.isdir(roles_path):
                move_sequence = "mv {1} {0}".format(roles_path, roles_path_tmp)
            else:
                # keep a uniquely-named backup of the previous contrib dir
                bak_dir=os.path.join(properties.NUCLEATOR_CONFIG_DIR, "contrib.bak")
                roles_path_bak=os.path.join(bak_dir, "-".join( [ "contrib.bak", str(uuid.uuid4()) ]))
                move_sequence = "mkdir -p {0} && mkdir -p {1} && mv {1} {2} && mv {3} {1}".format(bak_dir, roles_path, roles_path_bak, roles_path_tmp)
            utils.write(move_sequence + "\n")
            os.environ["PYTHONUNBUFFERED"]="1"
            move_process=usp.Popen(
                move_sequence,
                shell=True,
                stdout=usp.PIPE,
                stderr=usp.PIPE
            )
            move_out, move_err = move_process.communicate()
            move_rc = move_process.returncode
        except Exception, e:
            utils.write_err("Exception while moving updated nucleator commands into place:\n{0}".format(e), False)
            raise e

        # NOTE(review): the messages say "exiting with return code 1", so
        # utils.write_err presumably terminates the process -- confirm;
        # otherwise a failed update would fall through to write_versions().
        if update_rc != 0:
            utils.write_err("Received non-zero return code {0} while attempting to update from nucleator sources using command: {1}\n\ncaptured stderr:\n{2}\n\n exiting with return code 1...".format(update_rc, " ".join(update_command), update_err))
        elif move_rc !=0:
            utils.write_err("Received non-zero return code {0} while attempting to move updated nucleator sources into place using command: {1}\n\ncaptured stderr:\n{2}\n\n exiting with return code 1...".format(move_rc, move_sequence, move_err))
        self.write_versions()
        return 0

    def update_roles(self, **kwargs):
        """
        Use ansible-galaxy to install Ansible roles and any role dependencies
        specified in ansible/roles/roles.yml for any installed Nucleator Stackset.
        """
        utils.write("\nUpdating ansible roles specified in installed Nucleator Stacksets using ansible-galaxy.\n")
        cli=Command.get_cli(kwargs)
        cli.import_commands(os.path.join(properties.NUCLEATOR_CONFIG_DIR,"contrib"))
        path_list = cli.ansible_path_list("roles", isdir=True)
        for roles_path in path_list:
            sources = os.path.join(roles_path, "roles.yml")
            if os.path.isfile(sources):
                # import roles using ansible galaxy
                update_command = [
                    "ansible-galaxy", "install",
                    "--force",
                    "--role-file", sources,
                    "--roles-path", roles_path,
                ]
                utils.write(" ".join(update_command) + "\n")
                os.environ["PYTHONUNBUFFERED"]="1"
                try:
                    update_process=usp.Popen(
                        update_command,
                        shell=False,
                        stdout=usp.PIPE,
                        stderr=usp.PIPE
                    )
                    update_out, update_err = update_process.communicate()
                    update_rc = update_process.returncode
                except Exception, e:
                    utils.write_err("Exception while updating ansible roles from specified sources:\n{0}".format(e), False)
                    raise e
                if update_rc != 0:
                    utils.write_err("Received non-zero return code {0} while attempting to update ansible roles from specified sources using command: {1}\n\ncaptured stderr:\n{2}\n\n exiting with return code 1...".format(update_rc, " ".join(update_command), update_err))
        return 0
# Create the singleton for auto-discovery
# (presumably picked up by the nucleator CLI's command loader -- verify).
command = Update()
|
47lining/nucleator-core
|
lib/nucleator/core/update/commands/update.py
|
Python
|
apache-2.0
| 9,520
|
[
"Galaxy"
] |
7e7c72193144be9e702f7ed9199d823548b86f1b8f64482d8716f4b2d84644e4
|
import ddt
from django.template import Context, Template
import unittest
@ddt.ddt
class OAExtrasTests(unittest.TestCase):
    """Tests for the ``oa_extras`` template-tag library's
    ``link_and_linebreak`` filter."""

    # Rendered once per data case with a different ``text`` context value.
    template = Template(
        "{% load oa_extras %}"
        "{{ text|link_and_linebreak }}"
    )

    @ddt.data(
        ("", ""),
        ('check this https://dummy-url.com', 'https://dummy-url.com'),
        ('Visit this URL http://dummy-url.com', 'http://dummy-url.com'),
        ('dummy-text http://dummy-url.org', 'http://dummy-url.org'),
        ('dummy-url.com dummy-text', 'dummy-url.com')
    )
    @ddt.unpack
    def test_link_and_linebreak(self, text, link_text):
        """URL-like substrings survive filtering and become anchors."""
        rendered_template = self.template.render(Context({'text': text}))
        self.assertIn(link_text, rendered_template)
        if text:
            # Non-empty inputs must wrap the URL in an <a target="_blank">.
            self.assertRegexpMatches(
                rendered_template,
                r'<a.*target="_blank".*>{link_text}</a>'.format(link_text=link_text),
            )

    @ddt.data(
        ("hello <script></script>", "script"),
        ("http://dummy-url.com <applet></applet>", "applet"),
        ("<iframe></iframe>", "iframe"),
        ("<link></link>", "link"),
    )
    @ddt.unpack
    def test_html_tags(self, text, tag):
        """Dangerous HTML tags present in the input appear in the output.

        NOTE(review): the variable name ``escaped_tag`` suggests the original
        expected value was ``&lt;{tag}&gt;`` and the HTML entities were lost
        in extraction -- confirm against the upstream file before relying on
        this literal.
        """
        rendered_template = self.template.render(Context({'text': text}))
        escaped_tag = "<{tag}>".format(tag=tag)
        self.assertIn(escaped_tag, rendered_template)
|
Stanford-Online/edx-ora2
|
openassessment/tests/test_templatetags.py
|
Python
|
agpl-3.0
| 1,368
|
[
"VisIt"
] |
92d0c44269a0f8d6346a1e0e83ea96867395721c99e44db674dfc5a645f71413
|
"""
=================================
Gaussian Mixture Model Sine Curve
=================================
This example demonstrates the behavior of Gaussian mixture models fit on data
that was not sampled from a mixture of Gaussian random variables. The dataset
is formed by 100 points loosely spaced following a noisy sine curve. There is
therefore no ground truth value for the number of Gaussian components.
The first model is a classical Gaussian Mixture Model with 10 components fit
with the Expectation-Maximization algorithm.
The second model is a Bayesian Gaussian Mixture Model with a Dirichlet process
prior fit with variational inference. The low value of the concentration prior
makes the model favor a lower number of active components. This models
"decides" to focus its modeling power on the big picture of the structure of
the dataset: groups of points with alternating directions modeled by
non-diagonal covariance matrices. Those alternating directions roughly capture
the alternating nature of the original sine signal.
The third model is also a Bayesian Gaussian mixture model with a Dirichlet
process prior but this time the value of the concentration prior is higher
giving the model more liberty to model the fine-grained structure of the data.
The result is a mixture with a larger number of active components that is
similar to the first model where we arbitrarily decided to fix the number of
components to 10.
Which model is the best is a matter of subjective judgment: do we want to
favor models that only capture the big picture to summarize and explain most of
the structure of the data while ignoring the details or do we prefer models
that closely follow the high density regions of the signal?
The last two panels show how we can sample from the last two models. The
resulting samples distributions do not look exactly like the original data
distribution. The difference primarily stems from the approximation error we
made by using a model that assumes that the data was generated by a finite
number of Gaussian components instead of a continuous noisy sine curve.
"""
import itertools
import numpy as np
from scipy import linalg
import matplotlib.pyplot as plt
import matplotlib as mpl
from sklearn import mixture
color_iter = itertools.cycle(["navy", "c", "cornflowerblue", "gold", "darkorange"])
def plot_results(X, Y, means, covariances, index, title):
    """Scatter each mixture component's points with a 2-sigma ellipse.

    Parameters
    ----------
    X : ndarray of shape (n_samples, 2)
        The 2-D input data.
    Y : ndarray of shape (n_samples,)
        Predicted component label for each sample.
    means, covariances : iterables
        Per-component Gaussian mean (2,) and full covariance (2, 2).
    index : int
        Row of the 5x1 subplot grid to draw into (0-based).
    title : str
        Subplot title.
    """
    splot = plt.subplot(5, 1, 1 + index)
    for i, (mean, covar, color) in enumerate(zip(means, covariances, color_iter)):
        # Eigen-decomposition of the covariance gives the ellipse axes;
        # scale eigenvalues to a 2-standard-deviation contour.
        v, w = linalg.eigh(covar)
        v = 2.0 * np.sqrt(2.0) * np.sqrt(v)
        u = w[0] / linalg.norm(w[0])
        # as the DP will not use every component it has access to
        # unless it needs it, we shouldn't plot the redundant
        # components.
        if not np.any(Y == i):
            continue
        plt.scatter(X[Y == i, 0], X[Y == i, 1], 0.8, color=color)
        # Plot an ellipse to show the Gaussian component
        angle = np.arctan(u[1] / u[0])
        angle = 180.0 * angle / np.pi  # convert to degrees
        # Pass the rotation as a keyword argument: the positional ``angle``
        # parameter of Ellipse was deprecated in matplotlib 3.6 and removed
        # in matplotlib 3.8.
        ell = mpl.patches.Ellipse(mean, v[0], v[1], angle=180.0 + angle, color=color)
        ell.set_clip_box(splot.bbox)
        ell.set_alpha(0.5)
        splot.add_artist(ell)
    plt.xlim(-6.0, 4.0 * np.pi - 6.0)
    plt.ylim(-5.0, 5.0)
    plt.title(title)
    plt.xticks(())
    plt.yticks(())
def plot_samples(X, Y, n_components, index, title):
    """Scatter samples drawn from a fitted mixture, one color per component."""
    plt.subplot(5, 1, 4 + index)
    for component, component_color in zip(range(n_components), color_iter):
        members = Y == component
        # A Dirichlet-process prior leaves unused components empty; skip
        # components that generated no samples rather than plotting them.
        if not members.any():
            continue
        plt.scatter(X[members, 0], X[members, 1], 0.8, color=component_color)
    plt.xlim(-6.0, 4.0 * np.pi - 6.0)
    plt.ylim(-5.0, 5.0)
    plt.title(title)
    plt.xticks(())
    plt.yticks(())
# Parameters
n_samples = 100

# Generate random sample following a noisy sine curve
np.random.seed(0)
X = np.zeros((n_samples, 2))
step = 4.0 * np.pi / n_samples

for i in range(X.shape[0]):
    x = i * step - 6.0
    # x jittered with sigma=0.1; y = 3*sin(x) with sigma=0.2 noise.
    X[i, 0] = x + np.random.normal(0, 0.1)
    X[i, 1] = 3.0 * (np.sin(x) + np.random.normal(0, 0.2))

plt.figure(figsize=(10, 10))
plt.subplots_adjust(
    bottom=0.04, top=0.95, hspace=0.2, wspace=0.05, left=0.03, right=0.97
)

# Fit a Gaussian mixture with EM using ten components
gmm = mixture.GaussianMixture(
    n_components=10, covariance_type="full", max_iter=100
).fit(X)
plot_results(
    X, gmm.predict(X), gmm.means_, gmm.covariances_, 0, "Expectation-maximization"
)

# Dirichlet-process prior with a LOW concentration: favors few active
# components (the "big picture" fit described in the module docstring).
dpgmm = mixture.BayesianGaussianMixture(
    n_components=10,
    covariance_type="full",
    weight_concentration_prior=1e-2,
    weight_concentration_prior_type="dirichlet_process",
    mean_precision_prior=1e-2,
    covariance_prior=1e0 * np.eye(2),
    init_params="random",
    max_iter=100,
    random_state=2,
).fit(X)
plot_results(
    X,
    dpgmm.predict(X),
    dpgmm.means_,
    dpgmm.covariances_,
    1,
    "Bayesian Gaussian mixture models with a Dirichlet process prior "
    r"for $\gamma_0=0.01$.",
)

X_s, y_s = dpgmm.sample(n_samples=2000)
plot_samples(
    X_s,
    y_s,
    dpgmm.n_components,
    0,
    "Gaussian mixture with a Dirichlet process prior "
    r"for $\gamma_0=0.01$ sampled with $2000$ samples.",
)

# Same model with a HIGH concentration prior: many active components,
# producing a fit close to the plain 10-component EM mixture above.
dpgmm = mixture.BayesianGaussianMixture(
    n_components=10,
    covariance_type="full",
    weight_concentration_prior=1e2,
    weight_concentration_prior_type="dirichlet_process",
    mean_precision_prior=1e-2,
    covariance_prior=1e0 * np.eye(2),
    init_params="kmeans",
    max_iter=100,
    random_state=2,
).fit(X)
plot_results(
    X,
    dpgmm.predict(X),
    dpgmm.means_,
    dpgmm.covariances_,
    2,
    "Bayesian Gaussian mixture models with a Dirichlet process prior "
    r"for $\gamma_0=100$",
)

X_s, y_s = dpgmm.sample(n_samples=2000)
plot_samples(
    X_s,
    y_s,
    dpgmm.n_components,
    1,
    "Gaussian mixture with a Dirichlet process prior "
    r"for $\gamma_0=100$ sampled with $2000$ samples.",
)

plt.show()
|
scikit-learn/scikit-learn
|
examples/mixture/plot_gmm_sin.py
|
Python
|
bsd-3-clause
| 6,076
|
[
"Gaussian"
] |
39d185c6f1961632fa48c1020ae0cc953f01c6767aaeeb0a8ab5a6b6ec93eaeb
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import pytest
from numpy.testing import assert_array_equal
import numpy as np
import os
import MDAnalysis as mda
from MDAnalysisTests.datafiles import (AUX_XVG, XVG_BAD_NCOL, XVG_BZ2,
COORDINATES_XTC, COORDINATES_TOPOLOGY)
from MDAnalysisTests.auxiliary.base import (BaseAuxReaderTest, BaseAuxReference)
from MDAnalysis.auxiliary.XVG import XVGStep
class XVGReference(BaseAuxReference):
    """Reference data and expected values for testing the in-memory
    XVGReader against the shared BaseAuxReaderTest suite."""

    def __init__(self):
        super(XVGReference, self).__init__()
        self.testdata = AUX_XVG
        self.reader = mda.auxiliary.XVG.XVGReader
        # add the auxdata and format for .xvg to the reference description
        self.description['auxdata'] = os.path.abspath(self.testdata)
        self.description['format'] = self.reader.format
        # for testing the selection of data/time
        self.time_selector = 0  # take time as first value in each auxiliary step
        self.select_time_ref = np.arange(self.n_steps)
        self.data_selector = [1,2]  # select the second/third columns from auxiliary
        # expected values: column 1 holds 2*i and column 2 holds 2**i.
        self.select_data_ref = [self.format_data([2*i, 2**i]) for i in range(self.n_steps)]
class TestXVGStep():
    """Unit tests for XVGStep's time/data selection helpers."""

    @staticmethod
    @pytest.fixture()
    def step():
        return XVGStep()

    def test_select_time_none(self, step):
        # A ``None`` selector means "no time column": nothing is selected.
        assert step._select_time(None) is None

    def test_select_time_invalid_index(self, step):
        # Time must be addressed by a single index, not a sequence.
        with pytest.raises(ValueError, match="Time selector must be single index"):
            step._select_time([0])

    def test_select_data_none(self, step):
        # A ``None`` selector likewise selects no data columns.
        assert step._select_data(None) is None
class TestXVGReader(BaseAuxReaderTest):
    """Run the shared BaseAuxReaderTest suite against the in-memory
    XVGReader, plus XVG-specific error cases."""

    @staticmethod
    @pytest.fixture()
    def ref():
        return XVGReference()

    @staticmethod
    @pytest.fixture
    def ref_universe(ref):
        # Universe with the reference .xvg attached as auxiliary 'test'.
        u = mda.Universe(COORDINATES_TOPOLOGY, COORDINATES_XTC)
        u.trajectory.add_auxiliary('test', ref.testdata)
        return u

    @staticmethod
    @pytest.fixture()
    def reader(ref):
        # Plain reader with no time/data selection applied.
        return ref.reader(
            ref.testdata,
            initial_time=ref.initial_time,
            dt=ref.dt, auxname=ref.name,
            time_selector=None,
            data_selector=None
        )

    def test_changing_n_col_raises_ValueError(self, ref, reader):
        # if number of columns in .xvg file is not consistent, a ValueError
        # should be raised
        with pytest.raises(ValueError):
            reader = ref.reader(XVG_BAD_NCOL)
            next(reader)

    def test_time_selector_out_of_range_raises_ValueError(self, ref, reader):
        # if time_selector is not a valid index of _data, a ValueError
        # should be raised
        with pytest.raises(ValueError):
            reader.time_selector = len(reader.auxstep._data)

    def test_data_selector_out_of_range_raises_ValueError(self, ref, reader):
        # if data_selector is not a valid index of _data, a ValueError
        # should be raised
        with pytest.raises(ValueError):
            reader.data_selector = [len(reader.auxstep._data)]
class XVGFileReference(XVGReference):
    """Reference for the file-based ("XVG-F") reader variant; reuses the
    XVGReference data but routes it through XVGFileReader."""

    def __init__(self):
        super(XVGFileReference, self).__init__()
        self.reader = mda.auxiliary.XVG.XVGFileReader
        self.format = "XVG-F"
        self.description['format'] = self.format
class TestXVGFileReader(TestXVGReader):
    """Repeat the XVGReader suite for the file-based XVGFileReader."""

    @staticmethod
    @pytest.fixture()
    def ref():
        return XVGFileReference()

    @staticmethod
    @pytest.fixture
    def ref_universe(ref):
        # Same universe setup as TestXVGReader, against the file reader.
        u = mda.Universe(COORDINATES_TOPOLOGY, COORDINATES_XTC)
        u.trajectory.add_auxiliary('test', ref.testdata)
        return u

    @staticmethod
    @pytest.fixture()
    def reader(ref):
        return ref.reader(
            ref.testdata,
            initial_time=ref.initial_time,
            dt=ref.dt,
            auxname=ref.name,
            time_selector=None,
            data_selector=None
        )

    def test_get_auxreader_for(self, ref, reader):
        # Default reader of .xvg files is instead XVGReader, not XVGFileReader
        # so test specifying format
        reader = mda.auxiliary.core.get_auxreader_for(ref.testdata,
                                                      format=ref.format)
        assert reader == ref.reader

    def test_reopen(self, reader):
        reader._reopen()
        # should start us back at before step 0, so next takes us to step 0
        reader.next()
        assert reader.step == 0
def test_xvg_bz2():
    """XVGReader transparently decompresses bz2-compressed .xvg files."""
    expected_times = np.array([0., 50., 100.])
    bz2_reader = mda.auxiliary.XVG.XVGReader(XVG_BZ2)
    assert_array_equal(bz2_reader.read_all_times(), expected_times)
def test_xvg_file_bz2():
    """XVGFileReader transparently decompresses bz2-compressed .xvg files."""
    expected_times = np.array([0., 50., 100.])
    bz2_reader = mda.auxiliary.XVG.XVGFileReader(XVG_BZ2)
    assert_array_equal(bz2_reader.read_all_times(), expected_times)
|
MDAnalysis/mdanalysis
|
testsuite/MDAnalysisTests/auxiliary/test_xvg.py
|
Python
|
gpl-2.0
| 5,850
|
[
"MDAnalysis"
] |
9bb954c55902019cd8b2b517dfbc6fcb9fe9d5965770ba8fc2e5690666ef013d
|
from .estimator_base import *
class H2ONaiveBayesEstimator(H2OEstimator):
    """
    The naive Bayes classifier assumes independence between predictor variables
    conditional on the response, and a Gaussian distribution of numeric predictors with
    mean and standard deviation computed from the training dataset. When building a naive
    Bayes classifier, every row in the training dataset that contains at least one NA will
    be skipped completely. If the test dataset has missing values, then those predictors
    are omitted in the probability calculation during prediction.

    Parameters
    ----------
    model_id : str, optional
      Identifier for the model (assumption: used as the destination key on the
      H2O backend, as handled by H2OEstimator -- confirm).
    laplace : int
      A positive number controlling Laplace smoothing. The default zero disables smoothing.
    threshold : float
      The minimum standard deviation to use for observations without enough data.
      Must be at least 1e-10.
    eps : float
      A threshold cutoff to deal with numeric instability, must be positive.
    compute_metrics : bool
      A logical value indicating whether model metrics should be computed. Set to FALSE
      to reduce the runtime of the algorithm.
    balance_classes : bool, optional
      Whether to balance the training-class distribution (assumption: class
      oversampling as in other H2O estimators -- confirm against backend docs).
    max_after_balance_size : float, optional
      Maximum relative size of the training data after balancing; only
      meaningful together with balance_classes (assumption -- confirm).
    nfolds : int, optional
      Number of folds for cross-validation. If nfolds >= 2, then validation must remain
      empty.
    fold_assignment : str
      Cross-validation fold assignment scheme, if fold_column is not specified
      Must be "AUTO", "Random" or "Modulo"
    keep_cross_validation_predictions : bool
      Whether to keep the predictions of the cross-validation models.
    keep_cross_validation_fold_assignment : bool
      Whether to keep the cross-validation fold assignment.
    checkpoint : str, optional
      Model key to resume training from (assumption -- confirm).

    Returns
    -------
    Returns instance of H2ONaiveBayesEstimator
    """
    def __init__(self,model_id=None, laplace=None, threshold=None, eps=None,
                 compute_metrics=None, balance_classes=None,
                 max_after_balance_size=None, nfolds=None, fold_assignment=None,
                 keep_cross_validation_predictions=None,
                 keep_cross_validation_fold_assignment=None,
                 checkpoint=None):
        super(H2ONaiveBayesEstimator, self).__init__()
        # Snapshot all constructor arguments by name via locals(), then drop
        # the bound instance so _parms maps parameter name -> supplied value.
        self._parms = locals()
        self._parms = {k:v for k,v in self._parms.items() if k!="self"}

    # Every hyperparameter below is exposed as a read/write property backed
    # by the _parms dict; see the class docstring for parameter meanings.
    @property
    def laplace(self):
        return self._parms["laplace"]

    @laplace.setter
    def laplace(self, value):
        self._parms["laplace"] = value

    @property
    def threshold(self):
        return self._parms["threshold"]

    @threshold.setter
    def threshold(self, value):
        self._parms["threshold"] = value

    @property
    def eps(self):
        return self._parms["eps"]

    @eps.setter
    def eps(self, value):
        self._parms["eps"] = value

    @property
    def compute_metrics(self):
        return self._parms["compute_metrics"]

    @compute_metrics.setter
    def compute_metrics(self, value):
        self._parms["compute_metrics"] = value

    @property
    def balance_classes(self):
        return self._parms["balance_classes"]

    @balance_classes.setter
    def balance_classes(self, value):
        self._parms["balance_classes"] = value

    @property
    def max_after_balance_size(self):
        return self._parms["max_after_balance_size"]

    @max_after_balance_size.setter
    def max_after_balance_size(self, value):
        self._parms["max_after_balance_size"] = value

    @property
    def nfolds(self):
        return self._parms["nfolds"]

    @nfolds.setter
    def nfolds(self, value):
        self._parms["nfolds"] = value

    @property
    def fold_assignment(self):
        return self._parms["fold_assignment"]

    @fold_assignment.setter
    def fold_assignment(self, value):
        self._parms["fold_assignment"] = value

    @property
    def keep_cross_validation_predictions(self):
        return self._parms["keep_cross_validation_predictions"]

    @keep_cross_validation_predictions.setter
    def keep_cross_validation_predictions(self, value):
        self._parms["keep_cross_validation_predictions"] = value

    @property
    def keep_cross_validation_fold_assignment(self):
        return self._parms["keep_cross_validation_fold_assignment"]

    @keep_cross_validation_fold_assignment.setter
    def keep_cross_validation_fold_assignment(self, value):
        self._parms["keep_cross_validation_fold_assignment"] = value

    @property
    def checkpoint(self):
        return self._parms["checkpoint"]

    @checkpoint.setter
    def checkpoint(self, value):
        self._parms["checkpoint"] = value
|
YzPaul3/h2o-3
|
h2o-py/h2o/estimators/naive_bayes.py
|
Python
|
apache-2.0
| 4,328
|
[
"Gaussian"
] |
7ca23c4e639eb8efec40d5d9dd58061e95c95c6cf0205e89b528a4c0f01deb91
|
# mako/codegen.py
# Copyright 2006-2021 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""provides functionality for rendering a parsetree constructing into module
source code."""
import json
import re
import time
from mako import ast
from mako import exceptions
from mako import filters
from mako import parsetree
from mako import util
from mako.pygen import PythonPrinter
# Version stamp written into every generated module (presumably used to
# detect and recompile stale compiled templates -- the check is not visible
# in this file chunk; confirm in the template-loading code).
MAGIC_NUMBER = 10

# names which are hardwired into the
# template and are not accessed via the
# context itself
TOPLEVEL_DECLARED = {"UNDEFINED", "STOP_RENDERING"}
RESERVED_NAMES = {"context", "loop"}.union(TOPLEVEL_DECLARED)
def compile(  # noqa
    node,
    uri,
    filename=None,
    default_filters=None,
    buffer_filters=None,
    imports=None,
    future_imports=None,
    source_encoding=None,
    generate_magic_comment=True,
    strict_undefined=False,
    enable_loop=True,
    reserved_names=frozenset(),
):
    """Generate module source code given a parsetree node,
    uri, and optional source filename"""
    output_buffer = util.FastEncodingBuffer()
    compile_context = _CompileContext(
        uri,
        filename,
        default_filters,
        buffer_filters,
        imports,
        future_imports,
        source_encoding,
        generate_magic_comment,
        strict_undefined,
        enable_loop,
        reserved_names,
    )
    # Constructing the visitor walks the parse tree and writes the rendered
    # module source into ``output_buffer`` as a side effect.
    _GenerateRenderMethod(PythonPrinter(output_buffer), compile_context, node)
    return output_buffer.getvalue()
class _CompileContext:
def __init__(
self,
uri,
filename,
default_filters,
buffer_filters,
imports,
future_imports,
source_encoding,
generate_magic_comment,
strict_undefined,
enable_loop,
reserved_names,
):
self.uri = uri
self.filename = filename
self.default_filters = default_filters
self.buffer_filters = buffer_filters
self.imports = imports
self.future_imports = future_imports
self.source_encoding = source_encoding
self.generate_magic_comment = generate_magic_comment
self.strict_undefined = strict_undefined
self.enable_loop = enable_loop
self.reserved_names = reserved_names
class _GenerateRenderMethod:
"""A template visitor object which generates the
full module source for a template.
"""
def __init__(self, printer, compiler, node):
    self.printer = printer
    self.compiler = compiler
    self.node = node
    self.identifier_stack = [None]
    # A def/block node generates its own render_<name>() callable;
    # anything else is the template body itself (render_body).
    self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))

    if self.in_def:
        name = "render_%s" % node.funcname
        args = node.get_argument_expressions()
        filtered = len(node.filter_args.args) > 0
        # Tag attribute values arrive as source text; eval() turns the
        # "True"/"False" strings into booleans.
        buffered = eval(node.attributes.get("buffered", "False"))
        cached = eval(node.attributes.get("cached", "False"))
        defs = None
        pagetag = None
        if node.is_block and not node.is_anonymous:
            args += ["**pageargs"]
    else:
        # Template body: emit module-level preamble first, collecting
        # top-level defs to render afterwards.
        defs = self.write_toplevel()
        pagetag = self.compiler.pagetag
        name = "render_body"
        if pagetag is not None:
            args = pagetag.body_decl.get_argument_expressions()
            if not pagetag.body_decl.kwargs:
                args += ["**pageargs"]
            cached = eval(pagetag.attributes.get("cached", "False"))
            self.compiler.enable_loop = self.compiler.enable_loop or eval(
                pagetag.attributes.get("enable_loop", "False")
            )
        else:
            args = ["**pageargs"]
            cached = False
        buffered = filtered = False
    if args is None:
        args = ["context"]
    else:
        # 'context' is always the first argument of a render callable.
        args = [a for a in ["context"] + args]
    self.write_render_callable(
        pagetag or node, name, args, buffered, filtered, cached
    )
    # Each top-level def discovered above gets its own render method.
    if defs is not None:
        for node in defs:
            _GenerateRenderMethod(printer, compiler, node)
    if not self.in_def:
        self.write_metadata_struct()
def write_metadata_struct(self):
    """Append a JSON metadata block (filename, uri, source encoding and
    the generated-line -> template-line source map) wrapped in a trailing
    docstring, so it can be recovered from the compiled module text."""
    # Map the final output line to the last recorded template line.
    self.printer.source_map[self.printer.lineno] = max(
        self.printer.source_map
    )
    struct = {
        "filename": self.compiler.filename,
        "uri": self.compiler.uri,
        "source_encoding": self.compiler.source_encoding,
        "line_map": self.printer.source_map,
    }
    self.printer.writelines(
        '"""',
        "__M_BEGIN_METADATA",
        json.dumps(struct),
        "__M_END_METADATA\n" '"""',
    )
@property
def identifiers(self):
    """The identifier collection for the innermost scope being written."""
    return self.identifier_stack[-1]
def write_toplevel(self):
    """Traverse a template structure for module-level directives and
    generate the start of module-level code.

    Returns the list of top-level def nodes so the caller can generate
    a render method for each.
    """
    inherit = []
    namespaces = {}
    module_code = []

    self.compiler.pagetag = None

    # Visitor that collects module-level directives from the template's
    # immediate children (inherit/namespace/page tags and <%! %> code).
    class FindTopLevel:
        def visitInheritTag(s, node):
            inherit.append(node)

        def visitNamespaceTag(s, node):
            namespaces[node.name] = node

        def visitPageTag(s, node):
            self.compiler.pagetag = node

        def visitCode(s, node):
            if node.ismodule:
                module_code.append(node)

    f = FindTopLevel()
    for n in self.node.nodes:
        n.accept_visitor(f)

    self.compiler.namespaces = namespaces

    module_ident = set()
    for n in module_code:
        module_ident = module_ident.union(n.declared_identifiers())

    module_identifiers = _Identifiers(self.compiler)
    module_identifiers.declared = module_ident

    # module-level names, python code
    if (
        self.compiler.generate_magic_comment
        and self.compiler.source_encoding
    ):
        self.printer.writeline(
            "# -*- coding:%s -*-" % self.compiler.source_encoding
        )

    if self.compiler.future_imports:
        self.printer.writeline(
            "from __future__ import %s"
            % (", ".join(self.compiler.future_imports),)
        )
    self.printer.writeline("from mako import runtime, filters, cache")
    self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
    self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING")
    self.printer.writeline("__M_dict_builtin = dict")
    self.printer.writeline("__M_locals_builtin = locals")
    self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
    self.printer.writeline("_modified_time = %r" % time.time())
    self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
    self.printer.writeline(
        "_template_filename = %r" % self.compiler.filename
    )
    self.printer.writeline("_template_uri = %r" % self.compiler.uri)
    self.printer.writeline(
        "_source_encoding = %r" % self.compiler.source_encoding
    )
    if self.compiler.imports:
        # Emit template-defined imports and also parse them so their
        # declared names count as module-level identifiers.
        buf = ""
        for imp in self.compiler.imports:
            buf += imp + "\n"
            self.printer.writeline(imp)
        impcode = ast.PythonCode(
            buf,
            source="",
            lineno=0,
            pos=0,
            filename="template defined imports",
        )
    else:
        impcode = None

    main_identifiers = module_identifiers.branch(self.node)
    mit = module_identifiers.topleveldefs
    module_identifiers.topleveldefs = mit.union(
        main_identifiers.topleveldefs
    )
    module_identifiers.declared.update(TOPLEVEL_DECLARED)
    if impcode:
        module_identifiers.declared.update(impcode.declared_identifiers)

    self.compiler.identifiers = module_identifiers
    self.printer.writeline(
        "_exports = %r"
        % [n.name for n in main_identifiers.topleveldefs.values()]
    )
    self.printer.write_blanks(2)

    if len(module_code):
        self.write_module_code(module_code)

    # namespaces are written whenever either inheritance or explicit
    # namespace tags are present; inheritance uses the last inherit tag.
    if len(inherit):
        self.write_namespaces(namespaces)
        self.write_inherit(inherit[-1])
    elif len(namespaces):
        self.write_namespaces(namespaces)

    return list(main_identifiers.topleveldefs.values())
def write_render_callable(
    self, node, name, args, buffered, filtered, cached
):
    """write a top-level render callable.

    this could be the main render() method or that of a top-level def."""

    if self.in_def:
        decorator = node.decorator
        if decorator:
            self.printer.writeline(
                "@runtime._decorate_toplevel(%s)" % decorator
            )

    self.printer.start_source(node.lineno)
    self.printer.writelines(
        "def %s(%s):" % (name, ",".join(args)),
        # push new frame, assign current frame to __M_caller
        "__M_caller = context.caller_stack._push_frame()",
        "try:",
    )
    if buffered or filtered or cached:
        self.printer.writeline("context._push_buffer()")

    self.identifier_stack.append(
        self.compiler.identifiers.branch(self.node)
    )
    if (not self.in_def or self.node.is_block) and "**pageargs" in args:
        self.identifier_stack[-1].argument_declared.add("pageargs")

    if not self.in_def and (
        len(self.identifiers.locally_assigned) > 0
        or len(self.identifiers.argument_declared) > 0
    ):
        # Capture declared arguments in __M_locals so inner defs can see
        # the enclosing body's assignments at call time.
        self.printer.writeline(
            "__M_locals = __M_dict_builtin(%s)"
            % ",".join(
                [
                    "%s=%s" % (x, x)
                    for x in self.identifiers.argument_declared
                ]
            )
        )

    self.write_variable_declares(self.identifiers, toplevel=True)

    for n in self.node.nodes:
        n.accept_visitor(self)

    self.write_def_finish(self.node, buffered, filtered, cached)
    self.printer.writeline(None)
    self.printer.write_blanks(2)
    if cached:
        self.write_cache_decorator(
            node, name, args, buffered, self.identifiers, toplevel=True
        )
def write_module_code(self, module_code):
    """write module-level template code, i.e. that which
    is enclosed in <%! %> tags in the template."""
    for code_node in module_code:
        self.printer.write_indented_block(
            code_node.text, starting_lineno=code_node.lineno
        )
def write_inherit(self, node):
    """write the module-level inheritance-determination callable."""
    self.printer.writelines(
        "def _mako_inherit(template, context):",
        "_mako_generate_namespaces(context)",
        "return runtime._inherit_from(context, %s, _template_uri)"
        % (node.parsed_attributes["file"]),
        None,  # None pops the printer's indentation level
    )
def write_namespaces(self, namespaces):
    """write the module-level namespace-generating callable."""
    # _mako_get_namespace lazily populates the context's namespace map.
    self.printer.writelines(
        "def _mako_get_namespace(context, name):",
        "try:",
        "return context.namespaces[(__name__, name)]",
        "except KeyError:",
        "_mako_generate_namespaces(context)",
        "return context.namespaces[(__name__, name)]",
        None,
        None,
    )
    self.printer.writeline("def _mako_generate_namespaces(context):")

    for node in namespaces.values():
        if "import" in node.attributes:
            self.compiler.has_ns_imports = True
        self.printer.start_source(node.lineno)
        if len(node.nodes):
            # The namespace tag has a body: its defs/blocks become the
            # namespace's callables, built via a local factory function.
            self.printer.writeline("def make_namespace():")
            export = []
            identifiers = self.compiler.identifiers.branch(node)
            self.in_def = True

            class NSDefVisitor:
                def visitDefTag(s, node):
                    s.visitDefOrBase(node)

                def visitBlockTag(s, node):
                    s.visitDefOrBase(node)

                def visitDefOrBase(s, node):
                    if node.is_anonymous:
                        raise exceptions.CompileException(
                            "Can't put anonymous blocks inside "
                            "<%namespace>",
                            **node.exception_kwargs,
                        )
                    self.write_inline_def(node, identifiers, nested=False)
                    export.append(node.funcname)

            vis = NSDefVisitor()
            for n in node.nodes:
                n.accept_visitor(vis)
            self.printer.writeline("return [%s]" % (",".join(export)))
            self.printer.writeline(None)
            self.in_def = False
            callable_name = "make_namespace()"
        else:
            callable_name = "None"

        # Choose the runtime namespace class based on whether the tag
        # points at a template file, a python module, or neither.
        if "file" in node.parsed_attributes:
            self.printer.writeline(
                "ns = runtime.TemplateNamespace(%r,"
                " context._clean_inheritance_tokens(),"
                " templateuri=%s, callables=%s, "
                " calling_uri=_template_uri)"
                % (
                    node.name,
                    node.parsed_attributes.get("file", "None"),
                    callable_name,
                )
            )
        elif "module" in node.parsed_attributes:
            self.printer.writeline(
                "ns = runtime.ModuleNamespace(%r,"
                " context._clean_inheritance_tokens(),"
                " callables=%s, calling_uri=_template_uri,"
                " module=%s)"
                % (
                    node.name,
                    callable_name,
                    node.parsed_attributes.get("module", "None"),
                )
            )
        else:
            self.printer.writeline(
                "ns = runtime.Namespace(%r,"
                " context._clean_inheritance_tokens(),"
                " callables=%s, calling_uri=_template_uri)"
                % (node.name, callable_name)
            )
        if eval(node.attributes.get("inheritable", "False")):
            self.printer.writeline("context['self'].%s = ns" % (node.name))

        self.printer.writeline(
            "context.namespaces[(__name__, %s)] = ns" % repr(node.name)
        )
        self.printer.write_blanks(1)
    if not len(namespaces):
        # The generated function still needs a body.
        self.printer.writeline("pass")
    self.printer.writeline(None)
def write_variable_declares(self, identifiers, toplevel=False, limit=None):
    """write variable declarations at the top of a function.

    the variable declarations are in the form of callable
    definitions for defs and/or name lookup within the
    function's context argument. the names declared are based
    on the names that are referenced in the function body,
    which don't otherwise have any explicit assignment
    operation. names that are assigned within the body are
    assumed to be locally-scoped variables and are not
    separately declared.

    for def callable definitions, if the def is a top-level
    callable then a 'stub' callable is generated which wraps
    the current Context into a closure. if the def is not
    top-level, it is fully rendered as a local closure.

    """

    # collection of all defs available to us in this scope
    comp_idents = {c.funcname: c for c in identifiers.defs}
    to_write = set()

    # write "context.get()" for all variables we are going to
    # need that arent in the namespace yet
    to_write = to_write.union(identifiers.undeclared)

    # write closure functions for closures that we define
    # right here
    to_write = to_write.union(
        [c.funcname for c in identifiers.closuredefs.values()]
    )

    # remove identifiers that are declared in the argument
    # signature of the callable
    to_write = to_write.difference(identifiers.argument_declared)

    # remove identifiers that we are going to assign to.
    # in this way we mimic Python's behavior,
    # i.e. assignment to a variable within a block
    # means that variable is now a "locally declared" var,
    # which cannot be referenced beforehand.
    to_write = to_write.difference(identifiers.locally_declared)

    # 'loop' is handled specially below (bound to the runtime LoopStack)
    # rather than looked up in the context.
    if self.compiler.enable_loop:
        has_loop = "loop" in to_write
        to_write.discard("loop")
    else:
        has_loop = False

    # if a limiting set was sent, constraint to those items in that list
    # (this is used for the caching decorator)
    if limit is not None:
        to_write = to_write.intersection(limit)

    if toplevel and getattr(self.compiler, "has_ns_imports", False):
        self.printer.writeline("_import_ns = {}")
        self.compiler.has_imports = True
        for ident, ns in self.compiler.namespaces.items():
            if "import" in ns.attributes:
                self.printer.writeline(
                    "_mako_get_namespace(context, %r)."
                    "_populate(_import_ns, %r)"
                    % (
                        ident,
                        re.split(r"\s*,\s*", ns.attributes["import"]),
                    )
                )

    if has_loop:
        self.printer.writeline("loop = __M_loop = runtime.LoopStack()")

    for ident in to_write:
        if ident in comp_idents:
            comp = comp_idents[ident]
            if comp.is_block:
                if not comp.is_anonymous:
                    self.write_def_decl(comp, identifiers)
                else:
                    self.write_inline_def(comp, identifiers, nested=True)
            else:
                if comp.is_root():
                    self.write_def_decl(comp, identifiers)
                else:
                    self.write_inline_def(comp, identifiers, nested=True)

        elif ident in self.compiler.namespaces:
            self.printer.writeline(
                "%s = _mako_get_namespace(context, %r)" % (ident, ident)
            )
        else:
            # Plain name: look it up in namespace imports (if any) and/or
            # the context; strict_undefined raises instead of yielding
            # UNDEFINED for missing names.
            if getattr(self.compiler, "has_ns_imports", False):
                if self.compiler.strict_undefined:
                    self.printer.writelines(
                        "%s = _import_ns.get(%r, UNDEFINED)"
                        % (ident, ident),
                        "if %s is UNDEFINED:" % ident,
                        "try:",
                        "%s = context[%r]" % (ident, ident),
                        "except KeyError:",
                        "raise NameError(\"'%s' is not defined\")" % ident,
                        None,
                        None,
                    )
                else:
                    self.printer.writeline(
                        "%s = _import_ns.get"
                        "(%r, context.get(%r, UNDEFINED))"
                        % (ident, ident, ident)
                    )
            else:
                if self.compiler.strict_undefined:
                    self.printer.writelines(
                        "try:",
                        "%s = context[%r]" % (ident, ident),
                        "except KeyError:",
                        "raise NameError(\"'%s' is not defined\")" % ident,
                        None,
                    )
                else:
                    self.printer.writeline(
                        "%s = context.get(%r, UNDEFINED)" % (ident, ident)
                    )

    self.printer.writeline("__M_writer = context.writer()")
def write_def_decl(self, node, identifiers):
    """write a locally-available callable referencing a top-level def"""
    funcname = node.funcname
    namedecls = node.get_argument_expressions()
    nameargs = node.get_argument_expressions(as_call=True)

    # A top-level body that assigns locals must expose them to the def
    # through a context wrapper; otherwise the plain context suffices.
    needs_locals = not self.in_def and (
        len(self.identifiers.locally_assigned) > 0
        or len(self.identifiers.argument_declared) > 0
    )
    context_arg = (
        "context._locals(__M_locals)" if needs_locals else "context"
    )
    nameargs.insert(0, context_arg)

    self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
    self.printer.writeline(
        "return render_%s(%s)" % (funcname, ",".join(nameargs))
    )
    self.printer.writeline(None)
    def write_inline_def(self, node, identifiers, nested):
        """write a locally-available def callable inside an enclosing def.

        Generates the full nested ``def`` (decorator, caller-stack frame
        push, variable declarations, body, and finish section), and wraps
        it with a cache decorator if the def is marked ``cached``.
        """
        namedecls = node.get_argument_expressions()
        decorator = node.decorator
        if decorator:
            self.printer.writeline(
                "@runtime._decorate_inline(context, %s)" % decorator
            )
        self.printer.writeline(
            "def %s(%s):" % (node.funcname, ",".join(namedecls))
        )
        filtered = len(node.filter_args.args) > 0
        # NOTE: eval() of template-tag attributes; input here is authored
        # by the template writer, not an end user
        buffered = eval(node.attributes.get("buffered", "False"))
        cached = eval(node.attributes.get("cached", "False"))
        self.printer.writelines(
            # push new frame, assign current frame to __M_caller
            "__M_caller = context.caller_stack._push_frame()",
            "try:",
        )
        if buffered or filtered or cached:
            # output will be captured into a buffer rather than written
            # directly; write_def_finish() pops it again
            self.printer.writelines("context._push_buffer()")
        identifiers = identifiers.branch(node, nested=nested)
        self.write_variable_declares(identifiers)
        self.identifier_stack.append(identifiers)
        for n in node.nodes:
            n.accept_visitor(self)
        self.identifier_stack.pop()
        self.write_def_finish(node, buffered, filtered, cached)
        self.printer.writeline(None)
        if cached:
            self.write_cache_decorator(
                node,
                node.funcname,
                namedecls,
                False,
                identifiers,
                inline=True,
                toplevel=False,
            )
    def write_def_finish(
        self, node, buffered, filtered, cached, callstack=True
    ):
        """write the end section of a rendering function, either outermost or
        inline.

        this takes into account if the rendering function was filtered,
        buffered, etc. and closes the corresponding try: block if any, and
        writes code to retrieve captured content, apply filters, send proper
        return value.

        :param callstack: when True, also emit the ``finally:`` that pops
            the caller-stack frame pushed at the start of the def.
        """
        # plain (unbuffered/unfiltered/uncached) def: just return '' and
        # pop the caller frame
        if not buffered and not cached and not filtered:
            self.printer.writeline("return ''")
            if callstack:
                self.printer.writelines(
                    "finally:", "context.caller_stack._pop_frame()", None
                )
        if buffered or filtered or cached:
            if buffered or cached:
                # in a caching scenario, don't try to get a writer
                # from the context after popping; assume the caching
                # implementation might be using a context with no
                # extra buffers
                self.printer.writelines(
                    "finally:", "__M_buf = context._pop_buffer()"
                )
            else:
                self.printer.writelines(
                    "finally:",
                    "__M_buf, __M_writer = context._pop_buffer_and_writer()",
                )
            if callstack:
                self.printer.writeline("context.caller_stack._pop_frame()")
            s = "__M_buf.getvalue()"
            if filtered:
                # apply the def's own <%def filter="..."> expressions
                s = self.create_filter_callable(
                    node.filter_args.args, s, False
                )
            self.printer.writeline(None)
            if buffered and not cached:
                # buffer_filters apply only when returning buffered output
                s = self.create_filter_callable(
                    self.compiler.buffer_filters, s, False
                )
            if buffered or cached:
                self.printer.writeline("return %s" % s)
            else:
                self.printer.writelines("__M_writer(%s)" % s, "return ''")
    def write_cache_decorator(
        self,
        node_or_pagetag,
        name,
        args,
        buffered,
        identifiers,
        inline=False,
        toplevel=False,
    ):
        """write a post-function decorator to replace a rendering
        callable with a cached version of itself.

        The original function is first aliased to ``__M_<name>``; a new
        function of the same name is then emitted which delegates to the
        cache, using ``cache_*`` attributes collected from the ``<%page>``
        tag (defaults) and from the def/page tag itself (overrides).
        """
        self.printer.writeline("__M_%s = %s" % (name, name))
        cachekey = node_or_pagetag.parsed_attributes.get(
            "cache_key", repr(name)
        )
        cache_args = {}
        # page-level cache_* attributes act as defaults...
        if self.compiler.pagetag is not None:
            cache_args.update(
                (pa[6:], self.compiler.pagetag.parsed_attributes[pa])
                for pa in self.compiler.pagetag.parsed_attributes
                if pa.startswith("cache_") and pa != "cache_key"
            )
        # ...overridden by the tag's own cache_* attributes
        cache_args.update(
            (pa[6:], node_or_pagetag.parsed_attributes[pa])
            for pa in node_or_pagetag.parsed_attributes
            if pa.startswith("cache_") and pa != "cache_key"
        )
        if "timeout" in cache_args:
            # NOTE: eval() of a template-author-supplied attribute
            cache_args["timeout"] = int(eval(cache_args["timeout"]))
        self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
        # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
        pass_args = [
            "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
        ]
        self.write_variable_declares(
            identifiers,
            toplevel=toplevel,
            limit=node_or_pagetag.undeclared_identifiers(),
        )
        if buffered:
            s = (
                "context.get('local')."
                "cache._ctx_get_or_create("
                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)"
                % (
                    cachekey,
                    name,
                    ",".join(pass_args),
                    "".join(
                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
                    ),
                    name,
                )
            )
            # apply buffer_filters
            s = self.create_filter_callable(
                self.compiler.buffer_filters, s, False
            )
            self.printer.writelines("return " + s, None)
        else:
            # unbuffered: write the cached content straight to the writer
            self.printer.writelines(
                "__M_writer(context.get('local')."
                "cache._ctx_get_or_create("
                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))"
                % (
                    cachekey,
                    name,
                    ",".join(pass_args),
                    "".join(
                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
                    ),
                    name,
                ),
                "return ''",
                None,
            )
    def create_filter_callable(self, args, target, is_expression):
        """write a filter-applying expression based on the filters
        present in the given filter names, adjusting for the global
        'default' filter aliases as needed.

        Returns a source-code string wrapping *target* in nested filter
        calls, innermost filter applied first.  The literal name ``n``
        disables default/page-level filters for this expression.
        """
        def locate_encode(name):
            # built-in escape aliases (u, h, x, ...) map to the filters
            # module; 'decode.*' is always resolved against filters
            if re.match(r"decode\..+", name):
                return "filters." + name
            else:
                return filters.DEFAULT_ESCAPES.get(name, name)
        if "n" not in args:
            if is_expression:
                # expression filters inherit <%page filter="..."> first,
                # then the compiler-wide default_filters
                if self.compiler.pagetag:
                    args = self.compiler.pagetag.filter_args.args + args
                if self.compiler.default_filters and "n" not in args:
                    args = self.compiler.default_filters + args
        for e in args:
            # if filter given as a function, get just the identifier portion
            if e == "n":
                continue
            m = re.match(r"(.+?)(\(.*\))", e)
            if m:
                ident, fargs = m.group(1, 2)
                f = locate_encode(ident)
                e = f + fargs
            else:
                e = locate_encode(e)
                assert e is not None
            target = "%s(%s)" % (e, target)
        return target
    def visitExpression(self, node):
        """Emit a ``__M_writer(...)`` call for an ``${...}`` expression,
        applying escape/page/default filters when any are in effect."""
        self.printer.start_source(node.lineno)
        if (
            len(node.escapes)
            or (
                self.compiler.pagetag is not None
                and len(self.compiler.pagetag.filter_args.args)
            )
            or len(self.compiler.default_filters)
        ):
            s = self.create_filter_callable(
                node.escapes_code.args, "%s" % node.text, True
            )
            self.printer.writeline("__M_writer(%s)" % s)
        else:
            # no filters anywhere: write the expression directly
            self.printer.writeline("__M_writer(%s)" % node.text)
    def visitControlLine(self, node):
        """Emit a ``%``-prefixed control line (for/if/while/end...),
        rewriting ``for`` loops to use the runtime loop context when the
        magic ``loop`` variable is referenced inside the body."""
        if node.isend:
            # dedent; if this loop ran under the LoopStack, emit the
            # finally: block that pops the loop context
            self.printer.writeline(None)
            if node.has_loop_context:
                self.printer.writeline("finally:")
                self.printer.writeline("loop = __M_loop._exit()")
                self.printer.writeline(None)
        else:
            self.printer.start_source(node.lineno)
            if self.compiler.enable_loop and node.keyword == "for":
                text = mangle_mako_loop(node, self.printer)
            else:
                text = node.text
            self.printer.writeline(text)
            children = node.get_children()
            # this covers the three situations where we want to insert a pass:
            #    1) a ternary control line with no children,
            #    2) a primary control line with nothing but its
            #       own ternary and end control lines, and
            #    3) any control line with no content other than comments
            if not children or (
                all(
                    isinstance(c, (parsetree.Comment, parsetree.ControlLine))
                    for c in children
                )
                and all(
                    (node.is_ternary(c.keyword) or c.isend)
                    for c in children
                    if isinstance(c, parsetree.ControlLine)
                )
            ):
                self.printer.writeline("pass")
def visitText(self, node):
self.printer.start_source(node.lineno)
self.printer.writeline("__M_writer(%s)" % repr(node.content))
    def visitTextTag(self, node):
        """Render the children of a ``<%text>`` tag; when the tag declares
        filters, capture the output in a pushed writer and apply them on
        the way out."""
        filtered = len(node.filter_args.args) > 0
        if filtered:
            self.printer.writelines(
                "__M_writer = context._push_writer()", "try:"
            )
        for n in node.nodes:
            n.accept_visitor(self)
        if filtered:
            # pop the capture buffer and write the filtered result
            self.printer.writelines(
                "finally:",
                "__M_buf, __M_writer = context._pop_buffer_and_writer()",
                "__M_writer(%s)"
                % self.create_filter_callable(
                    node.filter_args.args, "__M_buf.getvalue()", False
                ),
                None,
            )
    def visitCode(self, node):
        """Emit an inline ``<% ... %>`` python block verbatim (module-level
        ``<%! %>`` blocks are handled elsewhere and skipped here)."""
        if not node.ismodule:
            self.printer.write_indented_block(
                node.text, starting_lineno=node.lineno
            )
            if not self.in_def and len(self.identifiers.locally_assigned) > 0:
                # if we are the "template" def, fudge locally
                # declared/modified variables into the "__M_locals" dictionary,
                # which is used for def calls within the same template,
                # to simulate "enclosing scope"
                self.printer.writeline(
                    "__M_locals_builtin_stored = __M_locals_builtin()"
                )
                self.printer.writeline(
                    "__M_locals.update(__M_dict_builtin([(__M_key,"
                    " __M_locals_builtin_stored[__M_key]) for __M_key in"
                    " [%s] if __M_key in __M_locals_builtin_stored]))"
                    % ",".join([repr(x) for x in node.declared_identifiers()])
                )
def visitIncludeTag(self, node):
self.printer.start_source(node.lineno)
args = node.attributes.get("args")
if args:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri, %s)"
% (node.parsed_attributes["file"], args)
)
else:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri)"
% (node.parsed_attributes["file"])
)
def visitNamespaceTag(self, node):
pass
def visitDefTag(self, node):
pass
    def visitBlockTag(self, node):
        """Emit the call site for a ``<%block>``: anonymous blocks are
        invoked directly; named blocks are invoked through ``self`` only
        when no parent template overrides them."""
        if node.is_anonymous:
            self.printer.writeline("%s()" % node.funcname)
        else:
            nameargs = node.get_argument_expressions(as_call=True)
            nameargs += ["**pageargs"]
            # only call the block here if a parent template doesn't
            # provide an overriding implementation
            self.printer.writeline(
                "if 'parent' not in context._data or "
                "not hasattr(context._data['parent'], '%s'):" % node.funcname
            )
            self.printer.writeline(
                "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))
            )
            self.printer.writeline("\n")
def visitCallNamespaceTag(self, node):
# TODO: we can put namespace-specific checks here, such
# as ensure the given namespace will be imported,
# pre-import the namespace, etc.
self.visitCallTag(node)
    def visitCallTag(self, node):
        """Render a ``<%call>`` tag: generate a ``ccall(caller)`` factory
        containing a ``body()`` callable (plus any defs/blocks nested in
        the tag), push it as the next caller namespace, and write the
        call expression's output."""
        self.printer.writeline("def ccall(caller):")
        export = ["body"]
        callable_identifiers = self.identifiers.branch(node, nested=True)
        body_identifiers = callable_identifiers.branch(node, nested=False)
        # we want the 'caller' passed to ccall to be used
        # for the body() function, but for other non-body()
        # <%def>s within <%call> we want the current caller
        # off the call stack (if any)
        body_identifiers.add_declared("caller")
        self.identifier_stack.append(body_identifiers)
        # local visitor: renders nested defs/blocks as inline defs and
        # collects their names for the returned callables list
        class DefVisitor:
            def visitDefTag(s, node):
                s.visitDefOrBase(node)
            def visitBlockTag(s, node):
                s.visitDefOrBase(node)
            def visitDefOrBase(s, node):
                self.write_inline_def(node, callable_identifiers, nested=False)
                if not node.is_anonymous:
                    export.append(node.funcname)
                # remove defs that are within the <%call> from the
                # "closuredefs" defined in the body, so they dont render twice
                if node.funcname in body_identifiers.closuredefs:
                    del body_identifiers.closuredefs[node.funcname]
        vis = DefVisitor()
        for n in node.nodes:
            n.accept_visitor(vis)
        self.identifier_stack.pop()
        bodyargs = node.body_decl.get_argument_expressions()
        self.printer.writeline("def body(%s):" % ",".join(bodyargs))
        # TODO: figure out best way to specify
        # buffering/nonbuffering (at call time would be better)
        buffered = False
        if buffered:
            self.printer.writelines("context._push_buffer()", "try:")
        self.write_variable_declares(body_identifiers)
        self.identifier_stack.append(body_identifiers)
        for n in node.nodes:
            n.accept_visitor(self)
        self.identifier_stack.pop()
        self.write_def_finish(node, buffered, False, False, callstack=False)
        self.printer.writelines(None, "return [%s]" % (",".join(export)), None)
        self.printer.writelines(
            # push on caller for nested call
            "context.caller_stack.nextcaller = "
            "runtime.Namespace('caller', context, "
            "callables=ccall(__M_caller))",
            "try:",
        )
        self.printer.start_source(node.lineno)
        self.printer.writelines(
            "__M_writer(%s)"
            % self.create_filter_callable([], node.expression, True),
            "finally:",
            "context.caller_stack.nextcaller = None",
            None,
        )
class _Identifiers:
    """tracks the status of identifier names as template code is rendered.

    Each instance represents one lexical scope (template body, def,
    block, call body...); ``branch()`` creates the child scope for a
    nested node.  The sets maintained here drive
    ``write_variable_declares()``.
    """
    def __init__(self, compiler, node=None, parent=None, nested=False):
        if parent is not None:
            # if we are the branch created in write_namespaces(),
            # we don't share any context from the main body().
            if isinstance(node, parsetree.NamespaceTag):
                self.declared = set()
                self.topleveldefs = util.SetLikeDict()
            else:
                # things that have already been declared
                # in an enclosing namespace (i.e. names we can just use)
                self.declared = (
                    set(parent.declared)
                    .union([c.name for c in parent.closuredefs.values()])
                    .union(parent.locally_declared)
                    .union(parent.argument_declared)
                )
                # if these identifiers correspond to a "nested"
                # scope, it means whatever the parent identifiers
                # had as undeclared will have been declared by that parent,
                # and therefore we have them in our scope.
                if nested:
                    self.declared = self.declared.union(parent.undeclared)
                # top level defs that are available
                self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
        else:
            self.declared = set()
            self.topleveldefs = util.SetLikeDict()
        self.compiler = compiler
        # things within this level that are referenced before they
        # are declared (e.g. assigned to)
        self.undeclared = set()
        # things that are declared locally. some of these things
        # could be in the "undeclared" list as well if they are
        # referenced before declared
        self.locally_declared = set()
        # assignments made in explicit python blocks.
        # these will be propagated to
        # the context of local def calls.
        self.locally_assigned = set()
        # things that are declared in the argument
        # signature of the def callable
        self.argument_declared = set()
        # closure defs that are defined in this level
        self.closuredefs = util.SetLikeDict()
        self.node = node
        # visiting the node populates the sets above
        if node is not None:
            node.accept_visitor(self)
        illegal_names = self.compiler.reserved_names.intersection(
            self.locally_declared
        )
        if illegal_names:
            raise exceptions.NameConflictError(
                "Reserved words declared in template: %s"
                % ", ".join(illegal_names)
            )
    def branch(self, node, **kwargs):
        """create a new Identifiers for a new Node, with
        this Identifiers as the parent."""
        return _Identifiers(self.compiler, node, self, **kwargs)
    @property
    def defs(self):
        # all defs visible at this level, top-level and closure alike
        return set(self.topleveldefs.union(self.closuredefs).values())
    def __repr__(self):
        return (
            "Identifiers(declared=%r, locally_declared=%r, "
            "undeclared=%r, topleveldefs=%r, closuredefs=%r, "
            "argumentdeclared=%r)"
            % (
                list(self.declared),
                list(self.locally_declared),
                list(self.undeclared),
                [c.name for c in self.topleveldefs.values()],
                [c.name for c in self.closuredefs.values()],
                self.argument_declared,
            )
        )
    def check_declared(self, node):
        """update the state of this Identifiers with the undeclared
        and declared identifiers of the given node."""
        for ident in node.undeclared_identifiers():
            if ident != "context" and ident not in self.declared.union(
                self.locally_declared
            ):
                self.undeclared.add(ident)
        for ident in node.declared_identifiers():
            self.locally_declared.add(ident)
    def add_declared(self, ident):
        """Mark *ident* as declared in this scope, removing it from the
        undeclared set if present."""
        self.declared.add(ident)
        if ident in self.undeclared:
            self.undeclared.remove(ident)
    def visitExpression(self, node):
        """${...} expressions: record declared/undeclared names."""
        self.check_declared(node)
    def visitControlLine(self, node):
        """% control lines: record declared/undeclared names."""
        self.check_declared(node)
    def visitCode(self, node):
        """<% %> blocks: record names; assignments here propagate to
        local def calls via locally_assigned."""
        if not node.ismodule:
            self.check_declared(node)
            self.locally_assigned = self.locally_assigned.union(
                node.declared_identifiers()
            )
    def visitNamespaceTag(self, node):
        # only traverse into the sub-elements of a
        # <%namespace> tag if we are the branch created in
        # write_namespaces()
        if self.node is node:
            for n in node.nodes:
                n.accept_visitor(self)
    def _check_name_exists(self, collection, node):
        """Register *node* under its funcname, raising if a conflicting
        %def/%block with the same name already exists."""
        existing = collection.get(node.funcname)
        collection[node.funcname] = node
        if (
            existing is not None
            and existing is not node
            and (node.is_block or existing.is_block)
        ):
            raise exceptions.CompileException(
                "%%def or %%block named '%s' already "
                "exists in this template." % node.funcname,
                **node.exception_kwargs,
            )
    def visitDefTag(self, node):
        """Register a <%def> as top-level or closure, and collect the
        names it references/declares."""
        if node.is_root() and not node.is_anonymous:
            self._check_name_exists(self.topleveldefs, node)
        elif node is not self.node:
            self._check_name_exists(self.closuredefs, node)
        for ident in node.undeclared_identifiers():
            if ident != "context" and ident not in self.declared.union(
                self.locally_declared
            ):
                self.undeclared.add(ident)
        # visit defs only one level deep
        if node is self.node:
            for ident in node.declared_identifiers():
                self.argument_declared.add(ident)
            for n in node.nodes:
                n.accept_visitor(self)
    def visitBlockTag(self, node):
        """Register a <%block>, enforcing that named blocks may not be
        nested inside defs or <%call> tags."""
        if node is not self.node and not node.is_anonymous:
            if isinstance(self.node, parsetree.DefTag):
                raise exceptions.CompileException(
                    "Named block '%s' not allowed inside of def '%s'"
                    % (node.name, self.node.name),
                    **node.exception_kwargs,
                )
            elif isinstance(
                self.node, (parsetree.CallTag, parsetree.CallNamespaceTag)
            ):
                raise exceptions.CompileException(
                    "Named block '%s' not allowed inside of <%%call> tag"
                    % (node.name,),
                    **node.exception_kwargs,
                )
        for ident in node.undeclared_identifiers():
            if ident != "context" and ident not in self.declared.union(
                self.locally_declared
            ):
                self.undeclared.add(ident)
        if not node.is_anonymous:
            self._check_name_exists(self.topleveldefs, node)
            self.undeclared.add(node.funcname)
        elif node is not self.node:
            self._check_name_exists(self.closuredefs, node)
        for ident in node.declared_identifiers():
            self.argument_declared.add(ident)
        for n in node.nodes:
            n.accept_visitor(self)
    def visitTextTag(self, node):
        """<%text> tags: only their filter expressions can reference
        undeclared names."""
        for ident in node.undeclared_identifiers():
            if ident != "context" and ident not in self.declared.union(
                self.locally_declared
            ):
                self.undeclared.add(ident)
    def visitIncludeTag(self, node):
        self.check_declared(node)
    def visitPageTag(self, node):
        """<%page> args become argument-declared names of the body."""
        for ident in node.declared_identifiers():
            self.argument_declared.add(ident)
        self.check_declared(node)
    def visitCallNamespaceTag(self, node):
        self.visitCallTag(node)
    def visitCallTag(self, node):
        """<%call>: traverse children only when this scope belongs to the
        call tag itself; otherwise just record undeclared names."""
        if node is self.node:
            for ident in node.undeclared_identifiers():
                if ident != "context" and ident not in self.declared.union(
                    self.locally_declared
                ):
                    self.undeclared.add(ident)
            for ident in node.declared_identifiers():
                self.argument_declared.add(ident)
            for n in node.nodes:
                n.accept_visitor(self)
        else:
            for ident in node.undeclared_identifiers():
                if ident != "context" and ident not in self.declared.union(
                    self.locally_declared
                ):
                    self.undeclared.add(ident)
# Matches a "for <targets> in <iterable>:" control line so it can be
# rewritten to run under the runtime loop context: group(1) is the
# (possibly parenthesized, comma-separated) target list, group(2) the
# iterable expression.
_FOR_LOOP = re.compile(
    r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*"
    r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):"
)
def mangle_mako_loop(node, printer):
    """converts a for loop into a context manager wrapped around a for loop
    when access to the `loop` variable has been detected in the for loop body

    Returns the (possibly rewritten) control-line text to emit.
    """
    detector = LoopVariable()
    node.accept_visitor(detector)
    if not detector.detected:
        return node.text
    # the matching end-control-line must also pop the loop context
    node.nodes[-1].has_loop_context = True
    match = _FOR_LOOP.match(node.text)
    if match is None:
        raise SyntaxError("Couldn't apply loop context: %s" % node.text)
    targets, iterable = match.group(1, 2)
    printer.writelines(
        "loop = __M_loop._enter(%s)" % iterable,
        "try:"
        # 'with __M_loop(%s) as loop:' % iterable
    )
    return "for %s in loop:" % targets
class LoopVariable:
    """A node visitor which looks for the name 'loop' within undeclared
    identifiers anywhere in a node tree."""

    def __init__(self):
        # flips to True permanently once a `loop` reference is found
        self.detected = False

    def _loop_reference_detected(self, node):
        if "loop" in node.undeclared_identifiers():
            self.detected = True
            return
        # not found here: recurse into the children
        for child in node.get_children():
            child.accept_visitor(self)

    def visitControlLine(self, node):
        self._loop_reference_detected(node)

    def visitCode(self, node):
        self._loop_reference_detected(node)

    def visitExpression(self, node):
        self._loop_reference_detected(node)
|
sqlalchemy/mako
|
mako/codegen.py
|
Python
|
mit
| 47,147
|
[
"VisIt"
] |
b129a3a740b7ba41a7d266777c6a9b70a2ecf5139f35ed3e90e20146bca0d4f8
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
# Root URLconf: static template pages, admin, user management, allauth.
urlpatterns = [
    # Static pages rendered directly from templates.
    url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'),
    url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),
    # Django Admin, use {% url 'admin:index' %}
    # (admin mount point comes from settings so it can differ per deploy)
    url(settings.ADMIN_URL, include(admin.site.urls)),
    # User management
    url(r'^users/', include('cookiecutter_example_project.users.urls', namespace='users')),
    url(r'^accounts/', include('allauth.urls')),
    # Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
        url(r'^500/$', default_views.server_error),
    ]
|
hjwp/cookiecutter-example-project
|
config/urls.py
|
Python
|
mit
| 1,452
|
[
"VisIt"
] |
464426c7d547aeeb1ff9c72152279e3830f115b0097374c46a00c2806f74c9f2
|
#
#
# File to test current configuration of GranuleCell project.
#
# To execute this type of file, type '..\..\..\nC.bat -python XXX.py' (Windows)
# or '../../../nC.sh -python XXX.py' (Linux/Mac). Note: you may have to update the
# NC_HOME and NC_MAX_MEMORY variables in nC.bat/nC.sh
#
# Author: Padraig Gleeson
#
# This file has been developed as part of the neuroConstruct project
# This work has been funded by the Medical Research Council and the
# Wellcome Trust
#
#
import sys
import os
import time
from java.io import File
from ucl.physiol.neuroconstruct.project import ProjectManager
from ucl.physiol.neuroconstruct.hpc.mpi import MpiSettings
from ucl.physiol.neuroconstruct.simulation import SimulationData
from ucl.physiol.neuroconstruct.gui.plotter import PlotManager
from ucl.physiol.neuroconstruct.project import SimPlot
from ucl.physiol.neuroconstruct.cell.utils import CellTopologyHelper
from ucl.physiol.neuroconstruct.utils.units import UnitConverter
sys.path.append(os.environ["NC_HOME"]+"/pythonNeuroML/nCUtils")
import ncutils as nc
# neuroConstruct project file to load (Jython/Python 2 script run via nC.sh)
projFile = File("../MooseDemo.ncx")
############## Main settings ##################
# simulation configurations within the project to generate and run
simConfigs = []
simConfigs.append("Default Simulation Configuration")
simConfigs.append("BigNet")
simConfigs.append("GapJuncs")
simDt = 0.001                    # simulation timestep (units per project convention)
neuroConstructSeed = 12345       # seed for network generation
simulatorSeed = 11111            # seed passed to each simulator run
# simulator backends to run; suffix selects GENESIS/MOOSE unit system
simulators = ["NEURON", "GENESIS_PHYS", "GENESIS_SI", "MOOSE_PHYS", "MOOSE_SI"]
mpiConf = MpiSettings.LOCAL_SERIAL
# max electrotonic length for recompartmentalisation; -1 = leave cells as-is
maxElecLens = [-1]
numConcurrentSims = 4            # throttle for parallel simulation launches
varTimestepNeuron = False
runSims = True
analyseSims = True
plotSims = True
plotVoltageOnly = True
runInBackground = True
verbose = True
#############################################
print "Loading project from "+ projFile.getCanonicalPath()
pm = ProjectManager()
project = pm.loadProject(projFile)
# bookkeeping lists shared by the helper functions below
allRunningSims = []
allFinishedSims = []
def updateSimsRunning():
for sim in allRunningSims:
timeFile = File(project.getProjectMainDirectory(), "simulations/"+sim+"/time.dat")
timeFile2 = File(project.getProjectMainDirectory(), "simulations/"+sim+"/time.txt") # for PSICS...
print "Checking file: "+timeFile.getAbsolutePath() +", exists: "+ str(timeFile.exists())
if (timeFile.exists()):
allFinishedSims.append(sim)
allRunningSims.remove(sim)
else:
print "Checking file: "+timeFile2.getAbsolutePath() +", exists: "+ str(timeFile2.exists())
if (timeFile2.exists()):
allFinishedSims.append(sim)
allRunningSims.remove(sim)
print "allFinishedSims: "+str(allFinishedSims)
print "allRunningSims: "+str(allRunningSims)
def reloadSims(waitForSimsToFinish):
    """Reload data from every finished simulation, plot the recorded
    traces, and (optionally) keep polling until all running sims finish.

    waitForSimsToFinish -- when True, recurses (with a 2 s sleep) while
    simulations remain in allRunningSims.
    """
    print "Trying to reload sims: "+str(allFinishedSims)
    plottedSims = []
    for simRef in allFinishedSims:
        simDir = File(projFile.getParentFile(), "/simulations/"+simRef)
        timeFile = File(simDir, "time.dat")
        timeFile2 = File(simDir,"time.txt") # for PSICS...
        if timeFile.exists() or timeFile2.exists():
            if verbose: print "--- Reloading data from simulation in directory: %s"%simDir.getCanonicalPath()
            time.sleep(1) # wait a while...
            try:
                simData = SimulationData(simDir)
                simData.initialise()
                times = simData.getAllTimes()
                if analyseSims:
                    # analysis currently disabled; the triple-quoted string
                    # below is a no-op placeholder kept for reference
                    '''
                    volts = simData.getVoltageAtAllTimes(cellSegmentRef)
                    if verbose: print "Got "+str(len(volts))+" data points on cell seg ref: "+cellSegmentRef
                    analyseStartTime = 0
                    analyseStopTime = simConfig.getSimDuration()
                    analyseThreshold = -20 # mV
                    spikeTimes = SpikeAnalyser.getSpikeTimes(volts, times, analyseThreshold, analyseStartTime, analyseStopTime)
                    print "Spike times in %s for sim %s: %s"%(cellSegmentRef, simRef, str(spikeTimes))
                    '''
                if plotSims:
                    simConfigName = simData.getSimulationProperties().getProperty("Sim Config")
                    # strip any parenthesized suffix from the config name
                    if simConfigName.find('(')>=0:
                        simConfigName = simConfigName[0:simConfigName.find('(')]
                    for dataStore in simData.getAllLoadedDataStores():
                        ds = simData.getDataSet(dataStore.getCellSegRef(), dataStore.getVariable(), False)
                        if not plotVoltageOnly or dataStore.getVariable() == SimPlot.VOLTAGE:
                            plotFrame = PlotManager.getPlotterFrame("Behaviour of "+dataStore.getVariable() \
                                +" on: %s for sim config: %s"%(str(simulators), simConfigName))
                            plotFrame.addDataSet(ds)
                    plottedSims.append(simRef)
            except:
                # NOTE(review): bare except also catches KeyboardInterrupt;
                # under Jython it additionally covers Java exceptions from
                # the neuroConstruct API -- consider narrowing
                print "Error analysing simulation data from: %s"%simDir.getCanonicalPath()
                print sys.exc_info()
    # remove plotted sims after the loop (safe: plottedSims is separate)
    for simRef in plottedSims:
        allFinishedSims.remove(simRef)
    if waitForSimsToFinish and len(allRunningSims)>0:
        if verbose: print "Waiting for sims: %s to finish..."%str(allRunningSims)
        time.sleep(2) # wait a while...
        updateSimsRunning()
        reloadSims(True)
def doCheckNumberSims():
    """Block until fewer than numConcurrentSims simulations are running,
    polling every 4 seconds."""
    print "Sims currently running: "+str(allRunningSims)
    while (len(allRunningSims)>=numConcurrentSims):
        print "Waiting..."
        time.sleep(4) # wait a while...
        updateSimsRunning()
# Main driver: for each sim config (and recompartmentalisation length),
# generate the network, then launch it on each requested simulator,
# throttled by doCheckNumberSims().
for simConfigName in simConfigs:
    for maxElecLen in maxElecLens:
        project.simulationParameters.setDt(simDt)
        simConfig = project.simConfigInfo.getSimConfig(simConfigName)
        recompSuffix = ""
        # optionally recompartmentalise the first cell group's cell type
        if maxElecLen > 0:
            cellGroup = simConfig.getCellGroups().get(0)
            cell = project.cellManager.getCell(project.cellGroupsInfo.getCellType(cellGroup))
            print "Recomp cell in: "+cellGroup+" which is: "+str(cell)
            info = CellTopologyHelper.recompartmentaliseCell(cell, maxElecLen, project)
            print "*** Recompartmentalised cell: "+info
            recompSuffix = "_"+str(maxElecLen)
        # generate the network (asynchronous; poll until done)
        pm.doGenerate(simConfig.getName(), neuroConstructSeed)
        while pm.isGenerating():
            if verbose: print "Waiting for the project to be generated with Simulation Configuration: "+str(simConfig)
            time.sleep(1)
        print "Generated network with %i cell(s)" % project.generatedCellPositions.getNumberInAllCellGroups()
        simRefPrefix = (simConfigName+"_").replace(' ', '')
        doCheckNumberSims()
        # --- NEURON ---
        if simulators.count("NEURON")>0:
            simRef = simRefPrefix+"_N"+recompSuffix
            project.simulationParameters.setReference(simRef)
            if runSims:
                nc.generateAndRunNeuron(project,
                                        pm,
                                        simConfig,
                                        simRef,
                                        simulatorSeed,
                                        verbose=verbose,
                                        runInBackground=runInBackground,
                                        varTimestep=varTimestepNeuron)
            allRunningSims.append(simRef)
        doCheckNumberSims()
        # --- PSICS (not in the default simulators list above) ---
        if simulators.count("PSICS")>0:
            simRef = simRefPrefix+"_P"+recompSuffix
            project.simulationParameters.setReference(simRef)
            if runSims:
                nc.generateAndRunPsics(project,
                                       pm,
                                       simConfig,
                                       simRef,
                                       simulatorSeed,
                                       verbose=verbose,
                                       runInBackground=runInBackground)
            allRunningSims.append(simRef)
        doCheckNumberSims()
        # --- MOOSE, in SI and/or physiological units ---
        for sim in simulators:
            if "MOOSE" in sim:
                simRef = simRefPrefix+"_M"+recompSuffix
                units = -1 # leave as what's set in project
                if "_SI" in sim:
                    simRef = simRef+"_SI"
                    units = UnitConverter.GENESIS_SI_UNITS
                if "_PHYS" in sim:
                    simRef = simRef+"_PHYS"
                    units = UnitConverter.GENESIS_PHYSIOLOGICAL_UNITS
                project.simulationParameters.setReference(simRef)
                if runSims:
                    nc.generateAndRunMoose(project,
                                           pm,
                                           simConfig,
                                           simRef,
                                           simulatorSeed,
                                           verbose=verbose,
                                           quitAfterRun=runInBackground,
                                           runInBackground=runInBackground,
                                           units=units)
                allRunningSims.append(simRef)
                time.sleep(2) # wait a while before running GENESIS...
        doCheckNumberSims()
        # --- GENESIS (asymmetric compartments), SI and/or phys units ---
        for sim in simulators:
            if "GENESIS" in sim:
                simRef = simRefPrefix+"_G"+recompSuffix
                units = -1 # leave as what's set in project
                if "_SI" in sim:
                    simRef = simRef+"_SI"
                    units = UnitConverter.GENESIS_SI_UNITS
                if "_PHYS" in sim:
                    simRef = simRef+"_PHYS"
                    units = UnitConverter.GENESIS_PHYSIOLOGICAL_UNITS
                project.simulationParameters.setReference(simRef)
                if runSims:
                    nc.generateAndRunGenesis(project,
                                             pm,
                                             simConfig,
                                             simRef,
                                             simulatorSeed,
                                             verbose=verbose,
                                             quitAfterRun=runInBackground,
                                             runInBackground=runInBackground,
                                             units=units,
                                             symmetricComps=False)
                allRunningSims.append(simRef)
                time.sleep(2) # wait a while before running GENESISsym...
        doCheckNumberSims()
        # --- GENESIS with symmetric compartments ---
        # NOTE(review): unlike the loops above this passes no units=
        # argument -- confirm whether that is intentional
        if simulators.count("GENESISsym")>0:
            simRef = simRefPrefix+"_Gs"+recompSuffix
            project.simulationParameters.setReference(simRef)
            if runSims:
                nc.generateAndRunGenesis(project,
                                         pm,
                                         simConfig,
                                         simRef,
                                         simulatorSeed,
                                         verbose=verbose,
                                         quitAfterRun=runInBackground,
                                         runInBackground=runInBackground,
                                         symmetricComps=True)
            allRunningSims.append(simRef)
        updateSimsRunning()
        # reload whatever is already done, then wait for the rest
        reloadSims(waitForSimsToFinish=False)
        reloadSims(waitForSimsToFinish=True)
if not plotSims:
    sys.exit()
|
pgleeson/TestArea
|
models/MooseDemo/pythonScripts/RunTests.py
|
Python
|
gpl-2.0
| 11,129
|
[
"MOOSE",
"NEURON"
] |
8fbcef167635095021339781302d7020f07d5f9d0c825cb5fe4d844d3c552a92
|
#!/usr/bin/env python
########################################################################
# File : dirac-wms-get-queue-cpu-time.py
# Author : Federico Stagni
########################################################################
""" Report CPU length of queue, in seconds
This script is used by the dirac-pilot script to set the CPUTime left, which is a limit for the matching
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Base import Script
# Optional -C/--CPUNormalizationFactor switch: the factor may already be
# known by the site; 0.0 means "not supplied, let getCPUTime determine it"
Script.registerSwitch( "C:", "CPUNormalizationFactor=", "CPUNormalizationFactor, in case it is known" )
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     ' %s [option|cfgfile]' % Script.scriptName ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
CPUNormalizationFactor = 0.0
for unprocSw in Script.getUnprocessedSwitches():
  if unprocSw[0] in ( "C", "CPUNormalizationFactor" ):
    CPUNormalizationFactor = float( unprocSw[1] )
if __name__ == "__main__":
  # deferred import: only needed when actually computing the CPU time
  from DIRAC.WorkloadManagementSystem.Client.CPUNormalization import getCPUTime
  cpuTime = getCPUTime( CPUNormalizationFactor )
  # the pilot script parses this printed value (queue CPU length, seconds)
  print cpuTime
  DIRAC.exit( 0 )
|
vmendez/DIRAC
|
WorkloadManagementSystem/scripts/dirac-wms-get-queue-cpu-time.py
|
Python
|
gpl-3.0
| 1,226
|
[
"DIRAC"
] |
b9c3077ae1424ab8a92644a6bf88e2a5794d9f67eac8d0cc52a22ddd3f2c7673
|
from django.core import serializers
from questionnaire.models import Questionnaire, Section, SubSection, Question, QuestionGroup, QuestionOption, QuestionGroupOrder
questionnaire = Questionnaire.objects.get(name="JRF 2013 Core English", description="From dropbox as given by Rouslan")
section_1 = Section.objects.create(order=4, questionnaire=questionnaire, name="Routine Coverage",
title="Immunization and Vitamin A Coverage <br/> National Administrative Coverage for the Year 2013")
sub_section = SubSection.objects.create(order=1, section=section_1, title="Administrative coverage")
question1 = Question.objects.create(text="Vaccine/Supplement",
UID='C00048', answer_type='MultiChoice',
instructions="Please complete separately for each vaccine, even if they are given in combination (e.g., if Pentavalent vaccine DTP-HepB-Hib is used, fill in the data for DTP3, HepB3 and Hib3)")
QuestionOption.objects.create(text="BCG", question=question1)
QuestionOption.objects.create(text="HepB, birth dose (given within 24 hours of birth)", question=question1,
instructions="Provide ONLY hepatitis B vaccine doses given within 24 hours of birth. If time of birth is unknown, please provide doses of hepatitis B vaccine given within first day of life. (For example, if the infant is born on day 0, include all HepB does given on days 0 and 1.) This indicator is NOT equivalent to HepB1")
QuestionOption.objects.create(text="DTP1", question=question1)
QuestionOption.objects.create(text="DTP3", question=question1)
QuestionOption.objects.create(text="Polio3 (OPV or IPV)", question=question1,
instructions="This refers to the third dose of polio vaccine, excluding polio 0 (zero), if such a dose is included in the national schedule.")
QuestionOption.objects.create(text="HepB3", question=question1,
instructions="""In countries using monovalent vaccine for all doses, this refers to the third dose of hepatitis B vaccine, including the birth dose, if such a dose is included in the national schedule.<br/>
In countries that are using monovalent vaccine for the birth dose and combination vaccine for the subsequent doses, HepB3 will refer to the third dose of the combination vaccine in addition to the birth dose.""")
QuestionOption.objects.create(text="Hib3", question=question1)
QuestionOption.objects.create(text="Pneumococcal conjugate vaccine 1st dose", question=question1)
QuestionOption.objects.create(text="Pneumococcal conjugate vaccine 2nd dose", question=question1)
QuestionOption.objects.create(text="Pneumococcal conjugate vaccine 3rd dose", question=question1)
QuestionOption.objects.create(text="Rotavirus 1st dose", question=question1)
QuestionOption.objects.create(text="Rotavirus last dose (2nd or 3rd depending on schedule)", question=question1)
QuestionOption.objects.create(text="MCV1 (measles-containing vaccine, 1st dose)", question=question1,
instructions="Measles-containing vaccine (MCV) includes measles vaccine, measles-rubella vaccine, measles-mumps-rubella vaccine, etc. Fill in the rows for both MCV and rubella vaccines even if they were given in combination.")
QuestionOption.objects.create(text="Rubella 1 (rubella-containing vaccine)", question=question1,
instructions="Measles-containing vaccine (MCV) includes measles vaccine, measles-rubella vaccine, measles-mumps-rubella vaccine, etc. Fill in the rows for both MCV and rubella vaccines even if they were given in combination.")
QuestionOption.objects.create(text="MCV2 (measles-containing vaccine, 2nd dose)", question=question1,
instructions="Measles-containing vaccine (MCV) includes measles vaccine, measles-rubella vaccine, measles-mumps-rubella vaccine, etc. Fill in the rows for both MCV and rubella vaccines even if they were given in combination.")
QuestionOption.objects.create(text="Vitamin A, 1st dose", question=question1)
QuestionOption.objects.create(text="Japanese encephalitis vaccine", question=question1)
QuestionOption.objects.create(text="Tetanus toxoid-containing vaccine (TT2+) ", question=question1)
QuestionOption.objects.create(text="Protection at birth (PAB) against neonatal tetanus", question=question1,
instructions="This refers to children who are protected at birth (PAB) against neonatal tetanus by their mother's TT status; this information is collected during the DTP1 visit - a child is deemed protected if the mother has received 2 doses of TT in the last pregnancy or at-least 3 doses of TT in previous years. If the country does not calculate PAB, leave the cells blank.")
question2 = Question.objects.create(text="Description of the denominator used in coverage calculation",
UID='C00049', answer_type='MultiChoice')
QuestionOption.objects.create(text="live birth", question=question2)
QuestionOption.objects.create(text="surviving infants", question=question2)
QuestionOption.objects.create(text="less than 59 months", question=question2)
QuestionOption.objects.create(text="12 - 59 months", question=question2)
QuestionOption.objects.create(text="6 - 59 months", question=question2)
QuestionOption.objects.create(text="pregnant women", question=question2,
instructions="The number of live births can be used as a proxy for the total number of pregnant women.")
question3 = Question.objects.create(text="Number in target group(denominator)",
UID='C00050', answer_type='Number', )
question4 = Question.objects.create(text="Number of doses administered through routine services (numerator)",
UID='C00051', answer_type='Number')
question5 = Question.objects.create(text="Percent coverage (=C/B*100)", UID='C00052', answer_type='Number')
parent1 = QuestionGroup.objects.create(subsection=sub_section, order=1, allow_multiples=True)
parent1.question.add(question1, question2, question3, question3, question4, question5)
QuestionGroupOrder.objects.create(question=question1, question_group=parent1, order=1)
QuestionGroupOrder.objects.create(question=question2, question_group=parent1, order=2)
QuestionGroupOrder.objects.create(question=question3, question_group=parent1, order=3)
QuestionGroupOrder.objects.create(question=question4, question_group=parent1, order=4)
QuestionGroupOrder.objects.create(question=question5, question_group=parent1, order=5)
sub_section2 = SubSection.objects.create(order=2, section=section_1, title="Accuracy of administrative coverage",
description="Administrative coverage estimates can be biased by inaccurate numerators and/or denominators. Use this space to describe any factors limiting the accuracy of the coverage estimates entered in the table above. Some common problems are listed here. Numerators may be underestimated because of incomplete reporting from reporting units or the exclusion of other vaccinating sources, such as the private sector and NGOs; or overestimated because of over-reporting from reporting units, for example, when other target groups are included. Denominators may have problems arising from population movements, inaccurate census estimations or projections, or multiple sources of data.")
question21 = Question.objects.create(text="Describe any factors limiting the accuracy of the numerator: ",
UID='C00053', answer_type='Text')
question22 = Question.objects.create(text="Describe any factors limiting the accuracy of the denominator: (denominator = number in target group)",
UID='C00054', answer_type='Text')
parent2 = QuestionGroup.objects.create(subsection=sub_section2, order=1)
parent2.question.add(question21)
QuestionGroupOrder.objects.create(question=question21, question_group=parent2, order=1)
parent3 = QuestionGroup.objects.create(subsection=sub_section2, order=2)
parent3.question.add(question22)
QuestionGroupOrder.objects.create(question=question22, question_group=parent3, order=1)
sub_section3 = SubSection.objects.create(order=3, section=section_1, title="Completeness of district level reporting",
description="This table collects information about the completeness of district reporting, i.e., the main reporting system which produced the numbers in the previous table on vaccine coverage. The number of expected reports is equal to the number of districts multiplied by the number of reporting periods in the year")
question31 = Question.objects.create(text="Total number of district reports expected at the national level from all districts across repording periods in 2013 (e.g., # districts x 12 months)",
UID='C00055', answer_type='Number')
question32 = Question.objects.create(text="Total number of district reports actually received at the national level from all districts across reporting periods in 2013",
UID='C00056', answer_type='Number')
parent4 = QuestionGroup.objects.create(subsection=sub_section3, order=1)
parent4.question.add(question31)
QuestionGroupOrder.objects.create(question=question31, question_group=parent4, order=1)
parent5 = QuestionGroup.objects.create(subsection=sub_section3, order=2)
parent5.question.add(question32)
QuestionGroupOrder.objects.create(question=question32, question_group=parent5, order=1)
sub_section4 = SubSection.objects.create(order=4, section=section_1, title="HPV Vaccine Doses administered: 2013",
description="Report the number of HPV vaccinations given to females by their age at time of administration for each of the three recommended doses of HPV vaccine. If age is unknown but can be estimated, report for the estimated age. For example, if vaccination is offered exclusively to girls in the 6th school form and most girls in the 6th school form are eleven years of age, vaccinations by dose may be reported as vaccinations for girls eleven years of age.")
question41 = Question.objects.create(text="Vaccine administered (age in years)", UID='C00057', answer_type='MultiChoice')
QuestionOption.objects.create(text="9", question=question41)
QuestionOption.objects.create(text="10", question=question41)
QuestionOption.objects.create(text="11", question=question41)
QuestionOption.objects.create(text="12", question=question41)
QuestionOption.objects.create(text="13", question=question41)
QuestionOption.objects.create(text="14", question=question41)
QuestionOption.objects.create(text="15+", question=question41)
QuestionOption.objects.create(text="unknown age", question=question41)
question42 = Question.objects.create(text="1st dose", UID='C00058', answer_type='Number')
question43 = Question.objects.create(text="2d dose", UID='C00059', answer_type='Number')
question44 = Question.objects.create(text="3d dose", UID='C00060', answer_type='Number')
parent7 = QuestionGroup.objects.create(subsection=sub_section4, order=1, allow_multiples=True)
parent7.question.add(question41, question42, question43, question44)
QuestionGroupOrder.objects.create(question=question41, question_group=parent7, order=1)
QuestionGroupOrder.objects.create(question=question42, question_group=parent7, order=2)
QuestionGroupOrder.objects.create(question=question43, question_group=parent7, order=3)
QuestionGroupOrder.objects.create(question=question44, question_group=parent7, order=4)
sub_section5 = SubSection.objects.create(order=5, section=section_1, title="Accuracy of reported HPV Vaccine Doses")
question51 = Question.objects.create(text="Describe any factors limiting the accuracy of the administered doses",
UID='C00061', answer_type='Text')
parent8 = QuestionGroup.objects.create(subsection=sub_section5, order=1)
parent8.question.add(question51)
QuestionGroupOrder.objects.create(question=question51, question_group=parent8, order=1)
sub_section6 = SubSection.objects.create(order=6, section=section_1, title="Seasonal Influenza Vaccine Doses Administered",
description="In an updated position paper (2012), WHO recommends that countries considering the initiation or expansion of seasonal influenza vaccination programmes give the highest priority to pregnant women. Additional risk groups to be considered for vaccination, in no particular order of priority, are: children aged 6-59 months; the elderly; individuals with specific chronic medical conditions; and healthcare workers. Report immunization coverage in this table using data collected from vaccination clinics/sites on the number of doses administered for each of the risk groups that are included in the country-specific policy for seasonal influenza vaccination. ")
question61 = Question.objects.create(text="Description of target population", UID='C00062', answer_type='MultiChoice')
QuestionOption.objects.create(text="Children 6-23 months", question=question61)
QuestionOption.objects.create(text="Children >=24 months up to 9 years", question=question61)
QuestionOption.objects.create(text="Elderly (please specify minimum age under explanatory comments)", question=question61)
QuestionOption.objects.create(text="Pregnant women", question=question61)
QuestionOption.objects.create(text="Health care workers", question=question61)
QuestionOption.objects.create(text="Persons with chronic diseases ", question=question61)
#instruction = (e.g. respiratory, cardiac, liver and renal diseases; neurodevelopmental, immunological and haematological disorders, diabetes; obesity etc.)
QuestionOption.objects.create(text="Others)", question=question61)
#instruction = (may include various other groups: poultry workers, subnational levels, government officials, adults, etc
question62 = Question.objects.create(text="Number in target group (denominator)", UID='C00063', answer_type='Number')
question63 = Question.objects.create(text="Number of doses administered through routine services (numerator)", UID='C00064', answer_type='Number')
question64 = Question.objects.create(text="Percent coverage (=C/B*100)", UID='C00065', answer_type='Number')
parent6 = QuestionGroup.objects.create(subsection=sub_section6, order=1, allow_multiples=True)
parent6.question.add(question61, question62, question63, question64)
QuestionGroupOrder.objects.create(question=question61, question_group=parent6, order=1)
QuestionGroupOrder.objects.create(question=question62, question_group=parent6, order=2)
QuestionGroupOrder.objects.create(question=question63, question_group=parent6, order=3)
QuestionGroupOrder.objects.create(question=question64, question_group=parent6, order=4)
############################################ GENERATE FIXTURES
# questionnaires = Questionnaire.objects.all()
# sections = Section.objects.all()
# subsections = SubSection.objects.all()
# questions = Question.objects.all()
# question_groups = QuestionGroup.objects.all()
# options = QuestionOption.objects.all()
# orders = QuestionGroupOrder.objects.all()
# data = serializers.serialize("json", [questionnaires])
# print data
# data = serializers.serialize("json", [sections])
# print data
# data = serializers.serialize("json", [subsections])
# print data
#
# data = serializers.serialize("json", [questions])
# print data
#
# data = serializers.serialize("json", [question_groups])
# print data
#
# data = serializers.serialize("json", [options, orders])
# print data
|
testvidya11/ejrf
|
questionnaire/fixtures/questionnaire/section_4a.py
|
Python
|
bsd-3-clause
| 15,470
|
[
"VisIt"
] |
c7bea4781061ab4b03823e01198923917fe7429ab52c3d15025a8ebb8ca12e0e
|
# -*- coding: utf-8 -*-
#
# LatentStrainAnalysis documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 28 10:49:47 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'LatentStrainAnalysis'
copyright = u'2015, Brian Cleary'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'LatentStrainAnalysisdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'LatentStrainAnalysis.tex', u'LatentStrainAnalysis Documentation',
u'Brian Cleary', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'latentstrainanalysis', u'LatentStrainAnalysis Documentation',
[u'Brian Cleary'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'LatentStrainAnalysis', u'LatentStrainAnalysis Documentation',
u'Brian Cleary', 'LatentStrainAnalysis', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
brian-cleary/LatentStrainAnalysis
|
docs/source/conf.py
|
Python
|
mit
| 8,288
|
[
"Brian"
] |
de1057c526bdbf6cdf3aee19aa5db7c6509ff932a9593ab5dad089d1f2bffe92
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2011 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
##
""" A calls receipt implementation """
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.reporting.report import ObjectListReport
_ = stoqlib_gettext
class CallsReport(ObjectListReport):
"""Realized calls to client report"""
title = _("Calls Report")
main_object_name = (_("call"), _("calls"))
def __init__(self, filename, objectlist, data, *args, **kwargs):
person = kwargs.pop('person', None)
if person:
self.main_object_name = (_("performed call to %s") % person.name,
_("performed calls to %s") % person.name)
ObjectListReport.__init__(self, filename, objectlist, data,
*args, **kwargs)
|
tiagocardosos/stoq
|
stoqlib/reporting/callsreport.py
|
Python
|
gpl-2.0
| 1,653
|
[
"VisIt"
] |
24d3bf0ac77fd7800a775dfc8317b6694f2d1808f28c99e311dc891f9cdc91c6
|
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: Timothy Berkelbach <tim.berkelbach@gmail.com>
# Qiming Sun <osirpt.sun@gmail.com>
#
'''
Non-relativistic Restricted Kohn-Sham for periodic systems at a single k-point
See Also:
pyscf.pbc.dft.krks.py : Non-relativistic Restricted Kohn-Sham for periodic
systems with k-point sampling
'''
import time
import numpy
import pyscf.dft
from pyscf import lib
from pyscf.lib import logger
from pyscf.pbc.scf import hf as pbchf
from pyscf.pbc.scf import khf
from pyscf.pbc.dft import gen_grid
from pyscf.pbc.dft import numint
from pyscf.dft import rks as mol_ks
from pyscf.pbc.dft import multigrid
from pyscf import __config__
def get_veff(ks, cell=None, dm=None, dm_last=0, vhf_last=0, hermi=1,
kpt=None, kpts_band=None):
'''Coulomb + XC functional
.. note::
This function will change the ks object.
Args:
ks : an instance of :class:`RKS`
XC functional are controlled by ks.xc attribute. Attribute
ks.grids might be initialized.
dm : ndarray or list of ndarrays
A density matrix or a list of density matrices
Returns:
matrix Veff = J + Vxc. Veff can be a list matrices, if the input
dm is a list of density matrices.
'''
if cell is None: cell = ks.cell
if dm is None: dm = ks.make_rdm1()
if kpt is None: kpt = ks.kpt
t0 = (time.clock(), time.time())
omega, alpha, hyb = ks._numint.rsh_and_hybrid_coeff(ks.xc, spin=cell.spin)
hybrid = abs(hyb) > 1e-10 or abs(alpha) > 1e-10
if not hybrid and isinstance(ks.with_df, multigrid.MultiGridFFTDF):
n, exc, vxc = multigrid.nr_rks(ks.with_df, ks.xc, dm, hermi,
kpt.reshape(1,3), kpts_band,
with_j=True, return_j=False)
logger.debug(ks, 'nelec by numeric integration = %s', n)
t0 = logger.timer(ks, 'vxc', *t0)
return vxc
ground_state = (isinstance(dm, numpy.ndarray) and dm.ndim == 2
and kpts_band is None)
# Use grids.non0tab to detect whether grids are initialized. For
# UniformGrids, grids.coords as a property cannot indicate whehter grids are
# initialized.
if ks.grids.non0tab is None:
ks.grids.build(with_non0tab=True)
if (isinstance(ks.grids, gen_grid.BeckeGrids) and
ks.small_rho_cutoff > 1e-20 and ground_state):
ks.grids = prune_small_rho_grids_(ks, cell, dm, ks.grids, kpt)
t0 = logger.timer(ks, 'setting up grids', *t0)
if hermi == 2: # because rho = 0
n, exc, vxc = 0, 0, 0
else:
n, exc, vxc = ks._numint.nr_rks(cell, ks.grids, ks.xc, dm, 0,
kpt, kpts_band)
logger.debug(ks, 'nelec by numeric integration = %s', n)
t0 = logger.timer(ks, 'vxc', *t0)
if not hybrid:
vj = ks.get_j(cell, dm, hermi, kpt, kpts_band)
vxc += vj
else:
if getattr(ks.with_df, '_j_only', False): # for GDF and MDF
ks.with_df._j_only = False
vj, vk = ks.get_jk(cell, dm, hermi, kpt, kpts_band)
vk *= hyb
if abs(omega) > 1e-10:
vklr = ks.get_k(cell, dm, hermi, kpt, kpts_band, omega=omega)
vklr *= (alpha - hyb)
vk += vklr
vxc += vj - vk * .5
if ground_state:
exc -= numpy.einsum('ij,ji', dm, vk).real * .5 * .5
if ground_state:
ecoul = numpy.einsum('ij,ji', dm, vj).real * .5
else:
ecoul = None
vxc = lib.tag_array(vxc, ecoul=ecoul, exc=exc, vj=None, vk=None)
return vxc
def _patch_df_beckegrids(density_fit):
def new_df(self, auxbasis=None, with_df=None, *args, **kwargs):
mf = density_fit(self, auxbasis, with_df, *args, **kwargs)
mf.with_df._j_only = True
mf.grids = gen_grid.BeckeGrids(self.cell)
mf.grids.level = getattr(__config__, 'pbc_dft_rks_RKS_grids_level',
mf.grids.level)
return mf
return new_df
NELEC_ERROR_TOL = getattr(__config__, 'pbc_dft_rks_prune_error_tol', 0.02)
def prune_small_rho_grids_(ks, cell, dm, grids, kpts):
rho = ks.get_rho(dm, grids, kpts)
n = numpy.dot(rho, grids.weights)
if abs(n-cell.nelectron) < NELEC_ERROR_TOL*n:
rho *= grids.weights
idx = abs(rho) > ks.small_rho_cutoff / grids.weights.size
logger.debug(ks, 'Drop grids %d',
grids.weights.size - numpy.count_nonzero(idx))
grids.coords = numpy.asarray(grids.coords [idx], order='C')
grids.weights = numpy.asarray(grids.weights[idx], order='C')
grids.non0tab = grids.make_mask(cell, grids.coords)
return grids
@lib.with_doc(pbchf.get_rho.__doc__)
def get_rho(mf, dm=None, grids=None, kpt=None):
if dm is None: dm = mf.make_rdm1()
if grids is None: grids = mf.grids
if kpt is None: kpt = mf.kpt
if isinstance(mf.with_df, multigrid.MultiGridFFTDF):
rho = mf.with_df.get_rho(dm, kpt)
else:
rho = mf._numint.get_rho(mf.cell, dm, grids, kpt, mf.max_memory)
return rho
def _dft_common_init_(mf, xc='LDA,VWN'):
mf.xc = xc
mf.grids = gen_grid.UniformGrids(mf.cell)
# Use rho to filter grids
mf.small_rho_cutoff = getattr(__config__,
'pbc_dft_rks_RKS_small_rho_cutoff', 1e-7)
##################################################
# don't modify the following attributes, they are not input options
# Note Do not refer to .with_df._numint because mesh/coords may be different
if isinstance(mf, khf.KSCF):
mf._numint = numint.KNumInt(mf.kpts)
else:
mf._numint = numint.NumInt()
mf._keys = mf._keys.union(['xc', 'grids', 'small_rho_cutoff'])
class KohnShamDFT(mol_ks.KohnShamDFT):
__init__ = _dft_common_init_
def dump_flags(self, verbose=None):
logger.info(self, 'XC functionals = %s', self.xc)
logger.info(self, 'small_rho_cutoff = %g', self.small_rho_cutoff)
self.grids.dump_flags(verbose)
return self
def reset(self, mol=None):
pbchf.SCF.reset(self, mol)
self.grids.reset(mol)
return self
class RKS(KohnShamDFT, pbchf.RHF):
'''RKS class adapted for PBCs.
This is a literal duplication of the molecular RKS class with some `mol`
variables replaced by `cell`.
'''
def __init__(self, cell, kpt=numpy.zeros(3), xc='LDA,VWN'):
pbchf.RHF.__init__(self, cell, kpt)
KohnShamDFT.__init__(self, xc)
def dump_flags(self, verbose=None):
pbchf.RHF.dump_flags(self, verbose)
KohnShamDFT.dump_flags(self, verbose)
return self
get_veff = get_veff
energy_elec = pyscf.dft.rks.energy_elec
get_rho = get_rho
density_fit = _patch_df_beckegrids(pbchf.RHF.density_fit)
mix_density_fit = _patch_df_beckegrids(pbchf.RHF.mix_density_fit)
if __name__ == '__main__':
from pyscf.pbc import gto
cell = gto.Cell()
cell.unit = 'A'
cell.atom = 'C 0., 0., 0.; C 0.8917, 0.8917, 0.8917'
cell.a = '''0. 1.7834 1.7834
1.7834 0. 1.7834
1.7834 1.7834 0. '''
cell.basis = 'gth-szv'
cell.pseudo = 'gth-pade'
cell.verbose = 7
cell.output = '/dev/null'
cell.build()
mf = RKS(cell)
print(mf.kernel())
|
gkc1000/pyscf
|
pyscf/pbc/dft/rks.py
|
Python
|
apache-2.0
| 7,989
|
[
"PySCF"
] |
670c775e87af5eeed13b2b6c82fab52d4abc1385f534ec56605139df466045eb
|
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division
import numpy as np
class conv_ac_dp_c():
    """Conversion from atom-centered to dominant product basis and back, for vectors and tensors."""

    def __init__(self, pb, dtype=np.float64):
        """
        Args:
            pb: product-basis object providing the conversion data.
            dtype: numeric dtype for the conversion arrays. Previously this
                argument was accepted but silently discarded; it is now stored
                so subclasses/users can honor it.
        """
        self.pb = pb
        self.dtype = dtype
|
gkc1000/pyscf
|
pyscf/nao/m_conv_ac_dp.py
|
Python
|
apache-2.0
| 871
|
[
"PySCF"
] |
c4261d9049240959e85ceaf4041a9e2b6d7de0ddcddb8383633549504ba4e60b
|
#!/usr/bin/env python
'''
Consensus Maker
Version 2.0
By Brendan Kohrn and Scott Kennedy(1)
(1) Department of Pathology, University of Washington School of Medicine, Seattle, WA 98195
Based on work by Scott Kennedy
January 21, 2014
Written for Python 2.7.3
Required modules: Pysam, Samtools
Inputs:
A position-sorted paired-end BAM file containing reads with a duplex tag in the header.
Outputs:
1: A paired-end BAM file containing SSCSs
2: A single-end BAM file containing unpaired SSCSs (if --read_type is 'd')
3: A single-end BAM file containing reads with less common cigar strings
4: A single-end BAM file containing reads not in --read_type
5: A tagcounts file
Note that quality scores in outputs 1, 2, and 3 are just space fillers and do not signify anything about the quality of the sequence.
The program starts at the position of the first good read, determined by the type of read specified on startup. It then goes through the file until it finds a new position, saving all reads as it goes. When it finds a new position, it sends the saved reads to the consensus maker, one tag at a time, until it runs out of tags. Consensus sequences are saved until their mates come up, at which point both are written to the output BAM file, read 1 first. After making consensuses with the reads from the first position, it continues on through the original file until it finds another new position, sends those reads to the consensus maker, and so on until the end of the file. At the end of the file, any remaining reads are sent through the consensus maker, and any unpaired consensuses are written to a file ending in _UP.bam.
In the future, this program may be able to autodetect read length.
usage: ConsensusMaker.py [-h] [--infile INFILE] [--tagfile TAGFILE]
[--outfile OUTFILE] [--rep_filt REP_FILT]
[--minmem MINMEM] [--maxmem MAXMEM] [--cutoff CUTOFF]
[--Ncutoff NCUTOFF] [--readlength READ_LENGTH]
[--read_type READ_TYPE] [--isize ISIZE]
[--read_out ROUT] [--filt FILT]
optional arguments:
-h, --help show this help message and exit
--infile INFILE input BAM file
--tagfile TAGFILE output tagcounts file
--outfile OUTFILE output BAM file
--rep_filt REP_FILT Remove tags with homomeric runs of nucleotides of
length x. [9]
--minmem MINMEM Minimum number of reads allowed to comprise a
consensus. [3]
--maxmem MAXMEM Maximum number of reads allowed to comprise a
consensus. [1000]
--cutoff CUTOFF Percentage of nucleotides at a given position in a
read that must be identical in order for a consensus
to be called at that position. [0.7]
--Ncutoff NCUTOFF With --filt 'n', maximum fraction of Ns allowed in a
consensus [1.0]
--readlength READ_LENGTH
Length of the input read that is being used. [84]
--read_type READ_TYPE
A string specifying which types of read to consider.
Read types: n: Neither read 1 or read 2 mapped. m:
Either read 1 or read 2 mapped, but not both. p: Both
read 1 and read 2 mapped, not a proper pair. d: Both
read 1 and read 2 mapped, proper pair. s: Single
ended reads ['dpm']
--isize ISIZE maximum distance between read pairs
--read_out ROUT How often you want to be told what the program is
doing. [1000000]
--filt FILT A string indicating which filters should be
implemented. Filters: s: Softclipping filter. o:
Overlap filter. n: N filter. ['osn']
Details of different arguments:
--minmem and --maxmem set the range of family sizes (constrained by cigar score) that can be used to make a consensus sequence. Examples use --minmem of 3 and --maxmem of 1000
Example 1:
Ten reads (readlength = 80) have a particular barcode. Of these ten, nine of them have a cigar string of 80M, while one has a cigar string of 39M1I40M. Only the nine with a cigar string of 80M are sent on to be made into a SSCS.
Example 2:
Three reads (readlength 80) have a particular barcode. Of these, two have a cigar string of 80M, and one has a cigar string of 20M1D60M. No SSCS results.
Example 3:
A family with over 1000 members exists. A random sample of 1000 reads from that family is used to make a SSCS.
--cutoff sets the strictness of the consensus making.
Example (--cutoff = 0.7):
Four reads (readlength = 10) are as follows:
Read 1: ACTGATACTT
Read 2: ACTGAAACCT
Read 3: ACTGATACCT
Read 4: ACTGATACTT
The resulting SSCS is:
ACTGATACNT
--Ncutoff, with --filt n enabled, sets the maximum percentage of Ns allowed in a SSCS.
Example (--Ncutoff = .1, --readlength = 20):
Two SSCSs are generated as follows:
SSCS 1: ACGTGANCTAGTNCTNTACC
SSCS 2: GATCTAGTNCATGACCGATA
SSCS 2 passes the n filter (10%) with 1/20 = 5% Ns, while SSCS 1 does not with 3/20 = 15% Ns.
--readlength sets the length of the reads imputed. If this value is set incorrectly, the program will often crash with an error message about sequence length not matching quality score length, or will output an empty SSCS bam file.
--read_type sets which reads are considered to have 'good' flags. Options are:
d: Paired-end reads where both reads in the pair map, and where the two are properly paired (read 2 maps in the opposite direction and on the opposite strand from read 1). Flags are 99, 83, 163, and 147 .
p: Paired-end reads where both reads in the pair map, but the two are not properly paired. Flags are 97, 81, 161, 145, 129, 65, 177, and 113.
m: Paired-end reads where only one read in the pair maps. Flags are 181, 117, 137, 133, 73, 89, 69, and 153.
n: Paired-end reads where neither read in the pair maps, and single end unmapped reads. Flags are 141, 77, and 4.
s: Single end mapped reads. Flags are 0 and 16.
--filt sets which filters are used. Options are:
o: Overlap filter. Filters out any read pairs which overlap. Only works on reads of type d (see above).
s: Softclipping filter. Filters out any reads which have been soft-clipped in alignment. This avoids later problems with hard-clipping.
n: N filter. Filters out consensus sequences with a higher percentage of Ns than the threshold imposed by --Ncutoff. Without this option, --Ncutoff doesn't do anything.
--isize
If not -1, sets the maximum distance between read 1 and read 2 for the two to not be considered unpaired. Only works if --read_type is 'd'
'''
import sys
import pysam
import re
import random
from collections import defaultdict
from argparse import ArgumentParser
def printRead(readIn):
    """Debug helper: dump every field of *readIn* to stderr as one tab-separated line."""
    fields = (readIn.qname, readIn.flag, readIn.tid, readIn.pos,
              readIn.mapq, readIn.cigar, readIn.mrnm, readIn.mpos,
              readIn.isize, readIn.seq, readIn.qual, readIn.tags)
    sys.stderr.write("\t".join(str(field) for field in fields) + "\n")
def consensusMaker (groupedReadsList, cutoff, readLength) :
    '''The consensus maker uses a simple "majority rules" algorithm to make a consensus at each base position. If no nucleotide majority reaches above the minimum threshold (--cutoff), the position is considered undefined and an 'N' is placed at that position in the read.

    Args:
        groupedReadsList: sequences (strings) sharing one duplex tag.
        cutoff: fraction of reads that must agree for a base to be called.
        readLength: number of positions to call.

    Returns:
        str: the consensus sequence of length readLength (short/empty input
        yields 'N' at uncovered positions).
    '''
    # Tie-break / evaluation order preserved from the original: T, C, G, A, N.
    nucOrder = ('T', 'C', 'G', 'A', 'N')
    consensusRead = ''
    for pos in range(readLength):
        counts = {'T': 0, 'C': 0, 'G': 0, 'A': 0, 'N': 0}
        total = 0
        for read in groupedReadsList:
            try:
                base = read[pos]
            except IndexError:
                # A read shorter than readLength ends tallying for this
                # position (original behavior: bare except + break).
                break
            # Any character other than T/C/G/A is counted as N.
            counts[base if base in counts else 'N'] += 1
            total += 1
        if total == 0:
            # No coverage at this position (was a swallowed ZeroDivisionError).
            consensusRead += 'N'
            continue
        call = 'N'
        for nuc in nucOrder:
            if float(counts[nuc]) / float(total) > cutoff:
                call = nuc
                break
        consensusRead += call
    return consensusRead
def tagStats(tagCountsFile):
    """Summarize a .tagcounts file into a .tagstats file.

    Each output line is "<familySize>\t<fraction>", where the fraction is the
    share of all reads belonging to families of that size.
    """
    sizeTally = defaultdict(lambda: 0)
    inHandle = open(tagCountsFile, 'r')
    outHandle = open(tagCountsFile.replace('.tagcounts', '.tagstats'), 'w')
    # Count how many families have each family size.
    for record in inHandle:
        familySize = int(record.strip().split()[1].split(":")[0])
        sizeTally[familySize] += 1
    inHandle.close()
    # Weight each size by the number of reads it represents and total them.
    grandTotal = 0
    for familySize in sizeTally.keys():
        sizeTally[familySize] *= int(familySize)
        grandTotal += int(sizeTally[familySize])
    # Emit one line per family size, in ascending size order.
    for familySize in sorted(sizeTally.keys()):
        outHandle.write("%s\t%s\n" % (familySize, float(sizeTally[familySize]) / float(grandTotal)))
    outHandle.close()
    return True
def main():
    """Drive SSCS creation from a position-sorted BAM: parse arguments, filter
    reads, group them by duplex tag, build consensuses, and write the output
    BAM files plus the tagcounts/tagstats bookkeeping files."""
    # Parameters to be input.
    parser=ArgumentParser()
    parser.add_argument("--infile", action="store", dest="infile", help="input BAM file", required=True)
    parser.add_argument("--tagfile", action="store", dest="tagfile", help="output tagcounts file", default='sys.stdout', required=True)
    parser.add_argument("--outfile", action="store", dest="outfile", help="output BAM file", required=True)
    parser.add_argument("--rep_filt", action="store", type=int, dest='rep_filt', help="Remove tags with homomeric runs of nucleotides of length x. [9]", default=9 )
    parser.add_argument('--minmem', type=int, default=3, dest='minmem', help="Minimum number of reads allowed to comprise a consensus. [3] ")
    parser.add_argument('--maxmem', type=int, default=1000, dest='maxmem', help="Maximum number of reads allowed to comprise a consensus. [1000]")
    parser.add_argument('--cutoff', type=float, default=.7, dest='cutoff', help="Percentage of nucleotides at a given position in a read that must be identical in order for a consensus to be called at that position. [0.7]")
    parser.add_argument('--Ncutoff', type=float, default=1, dest='Ncutoff', help="With --filt 'n', maximum fraction of Ns allowed in a consensus [1.0]")
    parser.add_argument('--readlength', type=int, default=84, dest='read_length', help="Length of the input read that is being used. [80]")
    parser.add_argument('--read_type', type=str, action="store", dest='read_type', default="dpm", help="A string specifying which types of read to consider. Read types: n: Neither read 1 or read 2 mapped. m: Either read 1 or read 2 mapped, but not both. p: Both read 1 and read 2 mapped, not a propper pair. d: Both read 1 and read 2 mapped, propper pair. s: Single ended reads\n\t\t['dpm']")
    parser.add_argument('--isize', type = int, default=-1, dest='isize', help="maximum distance between read pairs")
    parser.add_argument('--read_out', type = int, default = 1000000, dest = 'rOut', help = 'How often you want to be told what the program is doing. [1000000]')
    parser.add_argument('--filt', type=str, default='osn', dest='filt', help="A string indicating which filters should be implemented. Filters: s: Softclipping filter. o: Overlap filter. n: N filter. ['osn']")
    o = parser.parse_args()

    # Initialization of all global variables, main input/output files, and main iterator and dictionaries.
    # Translate the requested read types into the set of SAM flags considered 'good'.
    goodFlag=[]
    if 'd' in o.read_type:
        goodFlag.extend((99, 83, 163, 147))
    if 'm' in o.read_type:
        goodFlag.extend((181, 117, 137, 133, 73, 89, 69, 153))
    if 'p' in o.read_type:
        goodFlag.extend((97, 81, 161, 145, 129, 65, 177, 113))
    if 'n' in o.read_type:
        goodFlag.extend((141, 77, 4))
    if 's' in o.read_type:
        goodFlag.extend((0, 16))

    inBam = pysam.Samfile( o.infile, "rb" ) # Open the input BAM file
    outBam = pysam.Samfile( o.outfile, "wb", template = inBam ) # Open the output BAM file
    outNC1 = pysam.Samfile( o.outfile.replace(".bam","_LCC.bam"),"wb", template = inBam )
    nonMap = pysam.Samfile( o.outfile.replace(".bam","_NM.bam"), "wb", template = inBam ) # File for reads with strange flags
    if o.read_type == 'd':
        extraBam = pysam.Samfile(o.outfile.replace(".bam","_UP.bam"), "wb", template = inBam)

    # Summary-statistics counters.
    readNum = 0
    nM = 0      # bad reads (written to the _NM file)
    bF = 0      # reads with bad flags
    oL = 0      # overlapping reads
    sC = 0      # softclipped reads
    rT = 0      # reads with repetitive duplex tags
    nC = 0      # consensuses with too many Ns
    LCC = 0     # reads with less common cigar strings
    ConMade = 0 # consensuses made
    if o.read_type == 'd':
        UP = 0  # unpaired consensuses
    fileDone=False # Initialize end of file bool
    finished=False
    readOne=False

    qualScore = 'J'*o.read_length # Set a dummy quality score
    bamEntry = inBam.fetch( until_eof = True ) # Initialize the iterator
    readWin = [bamEntry.next(), ''] # Get the first read
    winPos = 0
    readDict = {} # Initialize the read dictionary
    tagDict = defaultdict( lambda: 0 ) # Initialize the tag dictionary
    consensusDict={}

    # Start going through the input BAM file, one position at a time.
    for line in bamEntry:
        winPos += 1
        readWin[winPos%2] = line
        # Reinitialize first line
        if readOne==True:
            winPos -= 1
        # Consume every read sharing the current position (two-slot window).
        while (readWin[winPos%2].pos == readWin[(winPos-1)%2].pos and fileDone==False and readOne==False) or readOne == True:
            if readNum % o.rOut == 0:
                sys.stderr.write("Reads processed:" + str(readNum) + "\n")
            try:
                # Tag = duplex barcode from the read name plus read-pair member suffix.
                tag = readWin[winPos%2].qname.split('|')[1].split('/')[0] + (":1" if readWin[winPos%2].is_read1 == True else (":2" if readWin[winPos%2].is_read2 == True else ":se"))
                tagDict[tag] += 1
            except:
                print readNum
                raise
            # Overlap filter: filters out overlapping reads (with --filt o)
            overlap=False
            if 'o' in o.filt:
                if readWin[winPos%2].pos < readWin[winPos%2].mpos and readWin[winPos%2].mpos < readWin[winPos%2].pos + o.read_length and int(readWin[winPos%2].flag) in (83, 99, 147, 163):
                    overlap=True
                elif readWin[winPos%2].pos > readWin[winPos%2].mpos and readWin[winPos%2].pos < readWin[winPos%2].mpos + o.read_length and int(readWin[winPos%2].flag) in (83, 99, 147, 163):
                    overlap=True
                elif readWin[winPos%2].pos==readWin[winPos%2].mpos and int(readWin[winPos%2].flag) in (83, 99, 147, 163):
                    overlap=True
            readNum +=1
            # Softclip filter: filters out softclipped reads (with --filt s)
            softClip=False
            if 's' in o.filt:
                if readWin[winPos%2].cigar != None:
                    for tupple in readWin[winPos%2].cigar:
                        if tupple[0]==4: # CIGAR operation 4 == soft clip
                            softClip=True
            # Check if the given read is good data
            if int( readWin[winPos%2].flag ) in goodFlag and overlap==False and softClip==False:
                if ('A'*o.rep_filt in tag) or ('C'*o.rep_filt in tag) or ('G'*o.rep_filt in tag) or ('T'*o.rep_filt in tag):
                    # Check for bad barcodes
                    nM += 1
                    nonMap.write(readWin[winPos%2])
                    rT += 1
                else :
                    # Add the sequence to the read dictionary
                    if tag not in readDict:
                        readDict[tag] = [readWin[winPos%2].flag, readWin[winPos%2].rname, readWin[winPos%2].pos, readWin[winPos%2].mrnm, readWin[winPos%2].mpos, readWin[winPos%2].isize,{str(readWin[winPos%2].cigar):[0,readWin[winPos%2].cigar]}]
                    if str(readWin[winPos%2].cigar) not in readDict[tag][6]:
                        readDict[tag][6][str(readWin[winPos%2].cigar)]=[0,readWin[winPos%2].cigar]
                    readDict[tag][6][str(readWin[winPos%2].cigar)].append(readWin[winPos%2].seq)
                    readDict[tag][6][str(readWin[winPos%2].cigar)][0]+=1
            else:
                nM += 1
                nonMap.write(readWin[winPos%2])
                if int(readWin[winPos%2].flag) not in goodFlag:
                    bF += 1
                elif overlap == True:
                    oL += 1
                elif softClip == True:
                    sC += 1
            winPos += 1
            if readOne == False:
                try: # Keep StopIteration error from happening at the end of a file
                    readWin[winPos%2] = bamEntry.next() # Iterate the line
                except:
                    fileDone = True # Tell the program that it has reached the end of the file
            else:
                readOne = False
        else:
            # Send reads to consensusMaker
            readOne=True
            for dictTag in readDict.keys(): # Extract sequences to send to the consensus maker
                # Cigar string filtering
                cigComp={}
                for cigStr in readDict[dictTag][6].keys(): # Determine the most common cigar string
                    cigComp[cigStr]=readDict[dictTag][6][cigStr][0]
                maxCig=max(cigComp)
                if cigComp[maxCig] >= o.minmem:
                    if cigComp[maxCig] <= o.maxmem:
                        ConMade += 1
                        consensus = consensusMaker( readDict[dictTag][6][maxCig][2:], o.cutoff, o.read_length )
                    else:
                        # Oversized family: consensus from a random sample of maxmem reads.
                        ConMade += 1
                        consensus = consensusMaker(random.sample(readDict[dictTag][6][maxCig][2:], o.maxmem), o.cutoff, o.read_length)
                    # Reads whose cigar string differs from the majority go to the _LCC file.
                    for cigStr in readDict[dictTag][6].keys():
                        if cigStr != maxCig:
                            for n in xrange(2, len(readDict[dictTag][6][cigStr][2:])):
                                a = pysam.AlignedRead()
                                a.qname = dictTag
                                a.flag = readDict[dictTag][0]
                                a.seq = readDict[dictTag][6][cigStr][n]
                                a.rname = readDict[dictTag][1]
                                a.pos = readDict[dictTag][2]
                                a.mapq = 255
                                a.cigar = readDict[dictTag][6][cigStr][1]
                                a.mrnm = readDict[dictTag][3]
                                a.mpos=readDict[dictTag][4]
                                a.isize = readDict[dictTag][5]
                                a.qual = qualScore
                                outNC1.write(a)
                                LCC += 1
                    cigComp={}
                    # Filter out consensuses with too many Ns in them
                    if (consensus.count("N" )/ len(consensus) <= o.Ncutoff and 'n' in o.filt) or ('n' not in o.filt):
                        # Write a line to the consensusDictionary
                        a = pysam.AlignedRead()
                        a.qname = dictTag
                        a.flag = readDict[dictTag][0]
                        a.seq = consensus
                        a.rname = readDict[dictTag][1]
                        a.pos = readDict[dictTag][2]
                        a.mapq = 255
                        a.cigar = readDict[dictTag][6][maxCig][1]
                        a.mrnm = readDict[dictTag][3]
                        a.mpos=readDict[dictTag][4]
                        a.isize = readDict[dictTag][5]
                        a.qual = qualScore
                        # Write SSCSs to output BAM file in read pairs.
                        altTag=dictTag.replace(("1" if "1" in dictTag else "2"),("2" if "1" in dictTag else "1"))
                        if altTag in consensusDict:
                            if a.is_read1 == True:
                                outBam.write(a)
                                outBam.write(consensusDict.pop(altTag))
                            else:
                                outBam.write(consensusDict.pop(altTag))
                                outBam.write(a)
                        else:
                            consensusDict[dictTag]=a
                    else:
                        nC += 1
            readDict={} # Reset the read dictionary
            # Flush consensuses whose mates can no longer appear (fallen too far behind).
            if o.read_type == 'd':
                if o.isize != -1:
                    for consTag in consensusDict.keys():
                        if consensusDict[consTag].pos + o.isize < readWin[winPos%2].pos:
                            extraBam.write(consensusDict.pop(consTag))
                            UP += 1

    # Write unpaired SSCSs
    for consTag in consensusDict.keys():
        if o.read_type == 'd':
            extraBam.write(consensusDict.pop(consTag))
            UP += 1
        else:
            outBam.write(consensusDict.pop(consTag))

    # Close BAM files
    inBam.close()
    outBam.close()
    nonMap.close()
    outNC1.close()
    if o.read_type == 'd':
        extraBam.close()

    # Write summary statistics
    sys.stderr.write("Summary Statistics: \n")
    sys.stderr.write("Reads processed:" + str(readNum) + "\n")
    sys.stderr.write("Bad reads: %s\n" % nM)
    sys.stderr.write("\tReads with Bad Flags: %s\n" % bF)
    sys.stderr.write("\tOverlapping Reads: %s\n" % oL)
    sys.stderr.write("\tSoftclipped Reads: %s\n" %sC)
    sys.stderr.write("\tRepetitive Duplex Tag: %s\n" % rT)
    sys.stderr.write("Reads with Less Common Cigar Strings: %s\n" % LCC)
    sys.stderr.write("Consensuses Made: %s\n" % ConMade)
    #sys.stderr.write("Unpaired Consensuses: %s\n" % UP)
    sys.stderr.write("Consensuses with Too Many Ns: %s\n\n" % nC)

    # Write the tag counts file.
    tagFile = open( o.tagfile, "w" )
    tagFile.write ( "\n".join( [ "%s\t%d" % ( SMI, tagDict[SMI] ) for SMI in sorted( tagDict.keys(), key=lambda x: tagDict[x], reverse=True ) ] ))
    tagFile.close()
    tagStats(o.tagfile)
if __name__ == "__main__":
    # Run the consensus pipeline when invoked as a script.
    main()
|
mwschmit/Duplex-Sequencing
|
ConsensusMaker.py
|
Python
|
bsd-3-clause
| 22,894
|
[
"pysam"
] |
133d1cfb0c7afae31590925892fefbcb6f26e70e1823943ff4fad7002543b8e1
|
'''
Created on Oct 28, 2014
@author: ic4
'''
import unittest
import mock
import serapis.irods.api_wrapper as irods_api
from serapis.irods import exceptions as irods_exc
from serapis.storage.irods import _data_types as irods_types
def run_ils_fake1(output):
    """Build and return a mock whose call yields *output*.

    Bug fix: the configured mock was created but never returned, so every
    caller received None; the mock is now returned.
    """
    m = mock.Mock()
    m.return_value = output
    return m
class TestiRODSListOperations(unittest.TestCase):
    # Captured `ils` long-listing output, kept as a class-level fixture.
    ils_output = '/humgen/projects/serapis_staging:\n mercury 0 irods-ddn-gg07-9 9370 2014-07-18.12:03 & celery.log\n C- /humgen/projects/serapis_staging/537f5ff69bbf8f62fc5d9fb3\n C- /humgen/projects/serapis_staging/537f67919bbf8f62fc5d9fb5'

    # @mock.patch('serapis.irods.api_wrapper.iRODSListOperations._run_ils_long')
    # def run_ils_fake(self):
    #     return self.ils_output
    #
    #
    # @mock.patch('serapis.irods.api_wrapper.iRODSListOperations._run_ils_long', run_ils_fake())
    # def test_list_files_in_coll(self):
    #     o = serapis.irods.api_wrapper.iRODSListOperations.list_files_in_coll('No matter what')
    #     print "lalala"+str(o)
    #     self.assertEquals(o, 1)

    def test_process_file_line(self):
        """A long-listing file line is parsed into a FileLine tuple."""
        # '&' in the listing marks the replica as paired (checksum-verified).
        file_line = " ic4 3 irods-ddn-gg07-3 4295 2014-09-26.15:37 & users2.txt"
        res = irods_api.iRODSListOperations._process_file_line(file_line)
        expected = irods_types.FileLine(owner='ic4', replica_id='3', size='4295', resc_name='irods-ddn-gg07-3', timestamp='2014-09-26.15:37', is_paired=True, fname='users2.txt')
        self.assertEqual(res, expected)

        # Without '&', is_paired must come back False.
        file_line = " serapis 0 irods-ddn-gg07-3 16639 2014-10-27.15:24 plot_snps.jpg"
        res = irods_api.iRODSListOperations._process_file_line(file_line)
        expected = irods_types.FileLine(owner='serapis', replica_id='0', resc_name='irods-ddn-gg07-3', size='16639', timestamp='2014-10-27.15:24', is_paired=False, fname='plot_snps.jpg')
        self.assertEqual(res, expected)

        # Truncated input is rejected.
        file_line = " serapis 0 irods-ddn-gg07-3"
        self.assertRaises(irods_exc.UnexpectedIRODSiCommandOutputException, irods_api.iRODSListOperations._process_file_line, file_line)

        # Multi-line input is rejected too.
        file_line = " serapis 0 irods-ddn-gg07-3 16639 2014-10-27.15:24 plot_snps.jpg\n serapis 0 irods-ddn-gg07-3 16639 2014-10-27.15:24 plot_snps.jpg"
        self.assertRaises(irods_exc.UnexpectedIRODSiCommandOutputException, irods_api.iRODSListOperations._process_file_line, file_line)

    #CollLine = namedtuple('CollLine', ['coll_name'])
    def test_process_coll_line(self):
        """A 'C- ' prefixed line is parsed into a CollLine; anything else raises."""
        coll_line = ' C- /humgen/projects/serapis_staging/537f5ff69bbf8f62fc5d9fb3'
        res = irods_api.iRODSListOperations._process_coll_line(coll_line)
        expected = irods_types.CollLine(coll_name='/humgen/projects/serapis_staging/537f5ff69bbf8f62fc5d9fb3')
        self.assertEqual(res, expected)

        coll_line = ' /humgen/projects/serapis_staging/537f5ff69bbf8f62fc5d9fb3'
        self.assertRaises(irods_exc.UnexpectedIRODSiCommandOutputException, irods_api.iRODSListOperations._process_coll_line, coll_line)

    def test_process_icmd_output(self):
        """Whole ils output splits into collection lines and file lines."""
        # Mixed listing: one file line plus two sub-collections.
        ils_output = '/humgen/projects/serapis_staging:\n mercury 0 irods-ddn-gg07-9 9370 2014-07-18.12:03 & celery.log\n C- /humgen/projects/serapis_staging/537f5ff69bbf8f62fc5d9fb3\n C- /humgen/projects/serapis_staging/537f67919bbf8f62fc5d9fb5'
        res = irods_api.iRODSListOperations._process_icmd_output(ils_output)
        expected = irods_types.CollListing(coll_list=[irods_types.CollLine(coll_name='/humgen/projects/serapis_staging/537f5ff69bbf8f62fc5d9fb3'),
                                                      irods_types.CollLine(coll_name='/humgen/projects/serapis_staging/537f67919bbf8f62fc5d9fb5')],
                                           files_list=[irods_types.FileLine(owner='mercury', replica_id='0', resc_name='irods-ddn-gg07-9', size='9370',
                                                                            timestamp='2014-07-18.12:03', is_paired=True, fname='celery.log')])
        self.assertEqual(res, expected)

        # Two replicas of the same data object, no sub-collections.
        ils_output = '/humgen/projects/serapis_staging/542a73ee9bbf8f55ae187cce:\n mercury 0 irods-ddn-gg07-4 8207082116 2014-09-30.10:27 & 10:1-135534747.vcf.gz\n mercury 1 irods-ddn-rd10a-4 8207082116 2014-09-30.10:53 & 10:1-135534747.vcf.gz'
        res = irods_api.iRODSListOperations._process_icmd_output(ils_output)
        expected = irods_types.CollListing(coll_list=[], files_list=[irods_types.FileLine(owner='mercury', replica_id='0', resc_name='irods-ddn-gg07-4', size='8207082116', timestamp='2014-09-30.10:27', is_paired=True, fname='10:1-135534747.vcf.gz'),
                                                                     irods_types.FileLine(owner='mercury', replica_id='1', resc_name='irods-ddn-rd10a-4', size='8207082116', timestamp='2014-09-30.10:53', is_paired=True,fname='10:1-135534747.vcf.gz')
                                                                     ])
        self.assertEqual(res, expected)
class TestiRODSChecksumOperations(unittest.TestCase):
    def test_process_icmd_output(self):
        """ichksum output ("<name> <md5>" + summary line) parses into a ChecksumResult."""
        ichksum_output = '    Y:1-59373566.vcf.gz    30cd89134232c910664cc771bc42e7fd\nTotal checksum performed = 1, Failed checksum = 0'
        res = irods_api.iRODSChecksumOperations._process_icmd_output(ichksum_output)
        expected = irods_types.ChecksumResult(md5='30cd89134232c910664cc771bc42e7fd')
        self.assertEqual(res, expected)

        # Unparsable output raises UnexpectedIRODSiCommandOutputException.
        ichksum_output = 'ERROR'
        self.assertRaises(irods_exc.UnexpectedIRODSiCommandOutputException, irods_api.iRODSChecksumOperations._process_icmd_output, ichksum_output)
class TestiRODSMetaQueryOperations(unittest.TestCase):
    def test_process_output(self):
        """imeta query output (collection/dataObj pairs separated by '----') becomes full paths."""
        cmd_out = "collection: /seq/10100\ndataObj: 10100_8#0.bam\n----\ncollection: /seq/10100\ndataObj: 10100_8#0_phix.bam\n----\ncollection: /seq/10100\ndataObj: 10100_8#48.bam\n----\ncollection: /seq/10100\ndataObj: 10100_8#48_phix.bam\n"
        res = irods_api.iRODSMetaQueryOperations._process_icmd_output(cmd_out)
        expected = ["/seq/10100/10100_8#0.bam", "/seq/10100/10100_8#0_phix.bam", "/seq/10100/10100_8#48.bam", "/seq/10100/10100_8#48_phix.bam"]
        print("EXPECTED: "+str(res))
        self.assertSetEqual(set(res), set(expected))

        # 'No rows found' maps to an empty result list.
        cmd_out = 'No rows found'
        res = irods_api.iRODSMetaQueryOperations._process_icmd_output(cmd_out)
        expected = []
        self.assertEqual(res, expected)
class TestiRODSMetaListOperations(unittest.TestCase):
    def test_extract_attribute_from_line(self):
        """Everything after 'attribute: ' is the attribute name (spaces allowed)."""
        line = 'attribute: id_run\n'
        res = irods_api.iRODSMetaListOperations._extract_attribute_from_line(line)
        expected = 'id_run'
        self.assertEqual(res, expected)

        line = 'attribute: md5'
        res = irods_api.iRODSMetaListOperations._extract_attribute_from_line(line)
        expected = 'md5'
        self.assertEqual(res, expected)

        line = 'attribute: This is a long attribute name and with spaces'
        res = irods_api.iRODSMetaListOperations._extract_attribute_from_line(line)
        expected = 'This is a long attribute name and with spaces'
        self.assertEqual(res, expected)

        # A line that is not an attribute line is rejected.
        line = 'value: 2'
        self.assertRaises(ValueError, irods_api.iRODSMetaListOperations._extract_attribute_from_line, line)

    def test_extract_value_from_line(self):
        """Everything after 'value: ' is the attribute value."""
        line = 'value: /lustre/scratch109/srpipe/references/Danio_rerio/zv9/all/bwa/zv9_toplevel.fa'
        res = irods_api.iRODSMetaListOperations._extract_value_from_line(line)
        expected = '/lustre/scratch109/srpipe/references/Danio_rerio/zv9/all/bwa/zv9_toplevel.fa'
        self.assertEqual(res, expected)

    def test_process_icmd_output(self):
        """imeta ls output (attribute/value/units triples split by '----') becomes MetaAVUs."""
        cmd_output = 'attribute: target\nvalue: 1\nunits:\n----\nattribute: id_run\nvalue: 10100\nunits:\n----\nattribute: sample_id\nvalue: 1513933\nunits:\n----\n'
        res = irods_api.iRODSMetaListOperations._process_icmd_output(cmd_output)
        expected = [irods_types.MetaAVU(attribute='target',value='1'),
                    irods_types.MetaAVU(attribute='id_run', value='10100'),
                    irods_types.MetaAVU(attribute='sample_id', value='1513933')
                    ]
        self.assertSetEqual(set(res), set(expected))

        # 'No rows found' maps to an empty AVU list.
        cmd_output = 'No rows found'
        res = irods_api.iRODSMetaListOperations._process_icmd_output(cmd_output)
        expected = []
        self.assertEqual(res, expected)
# @wrappers.check_args_not_none
# def _process_icmd_output(cls, output):
# ''' This method takes the output of imeta command and converts it to a MetaAVU.'''
# avus_list = []
# lines = output.split('\n')
# attr_name, attr_val = None, None
# for line in lines:
# if not line.startswith('attribute'):
# attr_name = cls._extract_attribute_from_line(line)
# elif line.startswith('value: '):
# attr_val = cls._extract_value_from_line(line)
# if not attr_val:
# raise ValueError("Attirbute: "+attr_name+" has a None value!")
#
# if attr_name and attr_val:
# avus_list.append(irods_types.MetaAVU(attr_name, attr_val))
# attr_name, attr_val = None, None
# return avus_list
|
wtsi-hgi/serapis
|
tests/irods/test_api_wrapper.py
|
Python
|
agpl-3.0
| 9,717
|
[
"BWA"
] |
3b00a8e29d7c53da0ae031652a20bd4fe257df05d502c6f560bf0b0b707d2a61
|
'''
Takes a TLE at a certain time epoch and then computes the state vectors and
hence orbital elements at every time epoch (at every second) for the next 8
hours.
'''
import numpy as np
import math
import sys
import os
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
from kep_determination.gibbs_method import *
# Physical and SGP4 model constants.
pi = np.pi
meu = 398600.4418   # gravitational parameter GM [km^3/s^2]; used in Kepler's third law below
two_pi = 2*pi
min_per_day = 1440  # minutes per day
ae = 1              # distance unit of the model: one Earth radius
tothrd = 2.0/3.0    # exponent 2/3 used in the a1 computation
XJ3 = -2.53881e-6   # SGP4 zonal-harmonic constant (used via a3ovk2 in propagation_model)
e6a = 1.0E-6
xkmper = 6378.135   # Earth equatorial radius [km]
ge = 398600.8 # Earth gravitational constant
CK2 = 1.0826158e-3/2.0       # SGP4 constant: half of 1.0826158e-3
CK4 = -3.0*-1.65597e-6/8.0   # SGP4 constant: -3/8 of -1.65597e-6
def rev_per_day(axis, mu=398600.4418):
    """Return the mean motion (revolutions per day) for a semi-major axis.

    Args:
        axis (float): semi-major axis [km]; the absolute value is used.
        mu (float): gravitational parameter [km^3/s^2]; defaults to Earth's
            value (generalized from the previously hard-coded constant).

    Returns:
        float or None: revolutions per day, or None when the value cannot be
        computed (e.g. axis == 0), preserving the original silent-None contract.
    """
    try:
        # Kepler's third law: T = 2*pi*sqrt(a^3 / mu); revs/day = 86400 s / T.
        period = 2 * math.pi * math.sqrt(abs(axis) ** 3 / mu)
        return 86400 / period  # 86400 seconds in a day
    except ZeroDivisionError:
        # Was a bare `except: pass`; only division by zero can occur here.
        return None
class Error(Exception):
    '''Base class for the exceptions raised by this module.'''
    pass
class FlagCheckError(Error):
    '''Raised when compute_necessary_xxx() function is not called.'''
    def __init__(self):
        # NOTE(review): the usage message is printed directly rather than
        # attached to the exception, because propagate() swallows this error
        # and calls sys.exit() immediately.
        print("Error: Call compute_necessary_kep() or compute_necessary_tle() function of the class SGP4\n\
before calling propagate().\n\n\
Function Declaration:\n\n\
compute_necessary_kep(list, float)\n\
Parameter 1:List of keplerian elements (semi-major axis, inclination, ascension,\n\
eccentricity, perigee, anomaly)\n\
Parameter 2:bstar drag term\n\
Returns: NIL\n\n\
compute_necessary_tle(str, str)\n\
Parameter 1: First line of the TLE\n\
Parameter 2: Second line of the TLE\n\
Returns: NIL\n")
class SGP4(object):
def __init__(self):
    '''Initializes flag variable to check for FlagCheckError (custom exception).'''
    # 0 = no orbital elements loaded yet; compute_necessary_kep/tle() set it to 1.
    self.flag = 0
def compute_necessary_kep(self, kep, b_star=0.21109E-4):
    '''
    Initializes the necessary class variables using keplerian elements which
    are needed in the computation of the propagation model.

    Args:
        kep (list): kep elements in order [axis, inclination, ascension, eccentricity, perigee, anomaly]
        b_star (float): bstar drag term

    Returns:
        NIL
    '''
    self.flag = 1
    # Angles arrive in degrees and are stored in radians.
    self.xincl = float(kep[1]) * (pi/180)
    self.xnodeo = float(kep[2]) * (pi/180)
    self.eo = float(kep[3])
    self.omegao = float(kep[4]) * (pi/180)
    self.xmo = float(kep[5]) * (pi/180)
    # Mean motion from Kepler's third law: period -> rev/day -> radians/minute.
    t = 2*pi*math.sqrt(kep[0]**3/meu)
    n = 1/t
    n = n*86400 # 86400 seconds in a day
    self.xno = n*two_pi/min_per_day
    self.bstar = b_star
    # print(self.xmo,self.xnodeo,self.omegao,self.xincl,self.eo,self.xno,self.bstar)
def compute_necessary_tle(self, line1, line2):
    '''
    Initializes the necessary class variables using TLE which are needed in
    the computation of the propagation model.

    Args:
        line1 (str): line 1 of the TLE
        line2 (str): line 2 of the TLE

    Returns:
        NIL
    '''
    self.flag = 1
    # Fields are sliced from fixed TLE character columns; angular fields are
    # converted from degrees to radians.
    self.xmo = float(''.join(line2[43:51])) * (pi/180)       # mean anomaly
    self.xnodeo = float(''.join(line2[17:25])) * (pi/180)    # right ascension of ascending node
    self.omegao = float(''.join(line2[34:42])) * (pi/180)    # argument of perigee
    self.xincl = float(''.join(line2[8:16])) * (pi/180)      # inclination
    self.eo = float('0.'+str(''.join(line2[26:33])))         # eccentricity (TLE stores it without the leading "0.")
    self.xno = float(''.join(line2[52:63]))*two_pi/min_per_day   # mean motion: rev/day -> rad/min
    self.bstar = int(''.join(line1[53:59]))*(1e-5)*(10**int(''.join(line1[59:61])))  # B* drag: mantissa * 10^exponent
    # print(self.xmo,self.xnodeo,self.omegao,self.xincl,self.eo,self.xno,self.bstar)
def propagate(self, t1, t2):
'''
Invokes the function to compute state vectors and organises the final result.
The function first checks if compute_necessary_xxx() is called or not if
not then a custom exception is raised stating that call this function
first. Then it computes the state vector for the next 8 hours (28800
seconds in 8 hours) at every time epoch (28800 time epcohs) using the
sgp4 propagation model. The values of state vector is formatted upto
five decimal points and then all the state vectors got appended in a
list which stores the final output.
Args:
t1 (int): start time epoch
t2 (int): end time epoch
Returns:
numpy.ndarray: vector containing all state vectors
'''
try:
if(self.flag == 0):
raise FlagCheckError
except FlagCheckError:
sys.exit()
i = t1
size = t2-t1+1
final = np.zeros((size,6))
# gibbs = Gibbs()
while(i <= t2):
tsince = i
pos, vel = self.propagation_model(tsince)
data = [pos[0], pos[1], pos[2], vel[0], vel[1], vel[2]]
data = [float("{0:.5f}".format(i)) for i in data]
# ele = gibbs.orbital_elements(pos, vel)
# print(str(tsince) + " - " + str(ele))
# print(str(tsince) + " - " + str(pos) + " " + str(vel))
final[i,:] = data
i = i + 1
# del(gibbs)
return final
def propagation_model(self, tsince):
    '''
    From the time epoch and information from TLE, applies SGP4 on it.
    The function applies the Simplified General Perturbations algorithm
    SGP4 on the information extracted from the TLE at the given time epoch
    'tsince' and computes the state vector from it.
    Args:
        tsince (int): time epoch (minutes since TLE epoch)
    Returns:
        tuple: position (km) and velocity (km/s) vectors, each a 3-list
    '''
    # Constants
    s = ae + 78 / xkmper
    qo = ae + 120 / xkmper
    xke = math.sqrt((3600 * ge)/(xkmper**3))
    qoms2t = ((qo-s)**2)**2
    # Recover the original (un-Kozai'd) mean motion xnodp and
    # semimajor axis aodp from the input elements.
    temp2 = xke/self.xno
    a1 = temp2**tothrd
    cosio = math.cos(self.xincl)
    theta2 = cosio**2
    x3thm1 = 3*theta2-1
    eosq = self.eo**2
    betao2 = 1-eosq
    betao = math.sqrt(betao2)
    del1 = (1.5*CK2*x3thm1)/((a1**2)*betao*betao2)
    ao = a1*(1-del1*((1.0/3.0)+del1*(1+(134.0/81.0)*del1)))
    delo = 1.5*CK2*x3thm1/((ao**2)*betao*betao2)
    xnodp = (self.xno)/(1+delo)
    aodp = ao/(1-delo)
    # Initialization
    # For a perigee below 220 km the isimp flag is set and the drag
    # equations are truncated (the d2..t5cof terms are skipped).
    isimp = 0
    if((aodp*(1-self.eo)/ae) < (220.0/xkmper+ae)):
        isimp = 1
    # For a perigee below 156 km the values of s and qoms2t are altered.
    s4 = s
    qoms24 = qoms2t
    perigee = (aodp*(1-self.eo)-ae)*xkmper
    if(perigee < 156):
        s4 = perigee - 78
        if(perigee <= 98):
            s4 = 20
        qoms24 = ((120-s4)*ae/xkmper)**4
        s4 = s4/xkmper+ae
    pinvsq = 1/((aodp**2)*(betao2**2))
    tsi = 1/(aodp-s4)
    eta = aodp*(self.eo)*tsi
    etasq = eta**2
    eeta = (self.eo)*eta
    psisq = abs(1-etasq)
    coef = qoms24*(tsi**4)
    coef1 = coef/(psisq**3.5)
    # Drag coefficients c1..c5.
    c2 = coef1*xnodp*(aodp*(1+1.5*etasq+eeta*(4+etasq))+0.75*CK2*tsi/psisq*x3thm1*(8+3*etasq*(8+etasq)))
    c1 = self.bstar*c2
    sinio = math.sin(self.xincl)
    a3ovk2 = -XJ3/CK2*(ae**3)
    c3 = coef*tsi*a3ovk2*xnodp*ae*sinio/self.eo
    x1mth2 = 1-theta2
    c4 = 2*xnodp*coef1*aodp*betao2*(eta*(2.0+0.5*etasq)+(self.eo)*(0.5+2*etasq)-2*CK2*tsi/(aodp*psisq)*(-3*x3thm1*(1-2*eeta+etasq*(1.5-0.5*eeta))+0.75*x1mth2*(2*etasq-eeta*(1+etasq))*math.cos(2*self.omegao)))
    c5 = 2*coef1*aodp*betao2*(1+2.75*(etasq+eeta)+eeta*etasq)
    theta4 = theta2**2
    temp1 = 3*CK2*pinvsq*xnodp
    temp2 = temp1*CK2*pinvsq
    temp3 = 1.25*CK4*(pinvsq**2)*xnodp
    # Secular rates of mean anomaly, argument of perigee and node.
    xmdot = xnodp+0.5*temp1*betao*x3thm1+0.0625*temp2*betao*(13-78*theta2+137*theta4)
    x1m5th = 1-5*theta2
    omgdot = -0.5*temp1*x1m5th+0.0625*temp2*(7-114*theta2+395*theta4)+temp3*(3-36*theta2+49*theta4)
    xhdot1 = -temp1*cosio
    xnodot = xhdot1+(0.5*temp2*(4-19*theta2)+2*temp3*(3-7*theta2))*cosio
    omgcof = self.bstar*c3*math.cos(self.omegao)
    xmcof = -(2/3)*coef*(self.bstar)*ae/eeta
    xnodcf = 3.5*betao2*xhdot1*c1
    t2cof = 1.5*c1
    xlcof = 0.125*a3ovk2*sinio*(3+5*cosio)/(1+cosio)
    aycof = 0.25*a3ovk2*sinio
    delmo = (1+eta*math.cos(self.xmo))**3
    sinmo = math.sin(self.xmo)
    x7thm1 = 7*theta2-1
    if(isimp == 0):
        # Higher-order drag terms, only used in the full (non-simplified) model.
        c1sq = c1**2
        d2 = 4*aodp*tsi*c1sq
        temp = d2*tsi*c1/3
        d3 = (17*aodp+s4)*temp
        # NOTE(review): canonical SGP4 (SPACETRACK Report #3) uses
        # (221*aodp + 31*s4) here; '31*4' looks like a typo for '31*s4' — confirm.
        d4 = 0.5*temp*aodp*tsi*(221*aodp+31*4)*c1
        t3cof = d2+2*c1sq
        t4cof = 0.25*(3*d3+c1*(12*d2+10*c1sq))
        t5cof = 0.2*(3*d4+12*c1*d3+6*(d2**2)+15*c1sq*(2*d2+c1sq))
    # Update for secular gravity and atmospheric drag.
    xmdf = self.xmo+xmdot*tsince
    omgadf = self.omegao+omgdot*tsince
    xnoddf = self.xnodeo+xnodot*tsince
    omega = omgadf
    xmp = xmdf
    tsq = tsince**2
    xnode = xnoddf+xnodcf*tsq
    tempa = 1 - c1*tsince
    tempe = self.bstar*c4*tsince
    templ = t2cof*tsq
    if(isimp == 0):
        delomg = omgcof*tsince
        delm = xmcof*(((1+eta*math.cos(xmdf))**3)-delmo)
        temp = delomg+delm
        xmp = xmdf+temp
        omega = omgadf-temp
        tcube = tsq*tsince
        tfour = tsince*tcube
        tempa = tempa-d2*tsq-d3*tcube-d4*tfour
        tempe = tempe+self.bstar*c5*(math.sin(xmp)-sinmo)
        templ = templ+t3cof*tcube+tfour*(t4cof+tsince*t5cof)
    a = aodp*(tempa**2)
    e = self.eo-tempe
    xl = xmp+omega+xnode+xnodp*templ
    beta = math.sqrt(1-e**2)
    xn = xke/(a**1.5)
    # Long period periodics.
    axn = e*math.cos(omega)
    temp = 1/(a*(beta**2))
    xll = temp*xlcof*axn
    aynl = temp*aycof
    xlt = xl+xll
    ayn = e*math.sin(omega)+aynl
    # Solve Kepler's equation by Newton iteration (at most 10 steps,
    # or until the update is below the e6a tolerance).
    diff = xlt - xnode
    capu = diff - math.floor(diff/two_pi) * two_pi
    if(capu < 0):
        capu = capu + two_pi
    temp2 = capu
    i = 1
    while(1):
        sinepw = math.sin(temp2)
        cosepw = math.cos(temp2)
        temp3 = axn*sinepw
        temp4 = ayn*cosepw
        temp5 = axn*cosepw
        temp6 = ayn*sinepw
        epw = (capu-temp4+temp3-temp2)/(1-temp5-temp6)+temp2
        temp7 = temp2
        temp2 = epw
        i = i + 1
        if((i>10) | (abs(epw-temp7)<=e6a)):
            break
    # Short period preliminary quantities.
    ecose = temp5+temp6
    esine = temp3-temp4
    elsq = axn**2 + ayn**2
    temp = 1-elsq
    pl = a*temp
    r = a*(1-ecose)
    temp1 = 1/r
    rdot = xke*math.sqrt(a)*esine*temp1
    rfdot = xke*math.sqrt(pl)*temp1
    temp2 = a*temp1
    betal = math.sqrt(temp)
    temp3 = 1/(1+betal)
    cosu = temp2*(cosepw-axn+ayn*esine*temp3)
    sinu = temp2*(sinepw-ayn-axn*esine*temp3)
    u = math.atan2(sinu, cosu)
    if(u < 0):
        u = u + two_pi
    sin2u = 2*sinu*cosu
    cos2u = 2*(cosu**2)-1
    temp = 1/pl
    temp1 = CK2*temp
    temp2 = temp1*temp
    # Update for short periodics.
    rk = r*(1-1.5*temp2*betal*x3thm1)+0.5*temp1*x1mth2*cos2u
    uk = u-0.25*temp2*x7thm1*sin2u
    xnodek = xnode+1.5*temp2*cosio*sin2u
    xinck = self.xincl+1.5*temp2*cosio*sinio*cos2u
    rdotk = rdot-xn*temp1*x1mth2*sin2u
    rfdotk = rfdot+xn*temp1*(x1mth2*cos2u+1.5*x3thm1)
    # Orientation vectors and final position/velocity.
    MV = [-math.sin(xnodek)*math.cos(xinck), math.cos(xnodek)*math.cos(xinck), math.sin(xinck)]
    NV = [math.cos(xnodek), math.sin(xnodek), 0]
    UV = [0, 0, 0]
    VV = [0, 0, 0]
    for i in range(3):
        UV[i] = MV[i]*math.sin(uk) + NV[i]*math.cos(uk)
        VV[i] = MV[i]*math.cos(uk) - NV[i]*math.sin(uk)
    pos = [0, 0, 0]
    vel = [0, 0, 0]
    for i in range(3):
        pos[i] = rk*UV[i]*xkmper
        vel[i] = (rdotk*UV[i] + rfdotk*VV[i])*xkmper/60
    return pos, vel
@classmethod
def recover_tle(cls, pos, vel):
    """
    Recovers TLE back from state vector.
    First of all, only necessary information (which are inclination, right
    ascension of the ascending node, eccentricity, argument of perigee, mean
    anomaly, mean motion and bstar) that are needed in the computation of
    SGP4 propagation model are recovered. It is using a general format of
    TLE. State vectors are used to find orbital elements which are then
    inserted into the TLE format at their respective positions. Mean motion
    and bstar is calculated separately as it is not a part of orbital elements.
    Format of TLE: x denotes that there is a digit, c denotes a character value,
    underscore(_) denotes a plus/minus(+/-) sign value and period(.) denotes
    a decimal point.
    Args:
        pos (list): position vector
        vel (list): velocity vector
    Returns:
        list: line1 and line2 of TLE
    """
    # TLE format (line 1 is left as the generic placeholder).
    line1 = "1 xxxxxc xxxxxccc xxxxx.xxxxxxxx _.xxxxxxxx _xxxxx_x _xxxxx_x x xxxxx"
    line2 = "2 xxxxx xxx.xxxx xxx.xxxx xxxxxxx xxx.xxxx xxx.xxxx xx.xxxxxxxxxxxxxx"
    # line 2 is edited in place as a list of characters.
    line2 = list(line2)
    gibbs = Gibbs()
    ele = gibbs.orbital_elements(pos, vel)
    del(gibbs)
    # BUG FIX: the original converted each formatted value back to float and
    # then applied str(), which drops trailing zeros (e.g. 72.8430 becomes
    # '72.843') and corrupts the fixed-width TLE columns. Keep the formatted
    # string and right-justify it into the 8-character angle fields instead.
    line2[8:16] = "{0:.4f}".format(ele[2]).rjust(8)    # inclination (deg)
    line2[17:25] = "{0:.4f}".format(ele[4]).rjust(8)   # RAAN (deg)
    # eccentricity: 7 digits with an implied leading '0.'
    line2[26:33] = "{0:.7f}".format(ele[1])[2:]
    line2[34:42] = "{0:.4f}".format(ele[3]).rjust(8)   # argument of perigee (deg)
    line2[43:51] = "{0:.4f}".format(ele[5]).rjust(8)   # mean anomaly (deg)
    # mean motion (revolutions per day), 11-character field
    t = 2*pi*math.sqrt(ele[0]**3/meu)
    n = (1/t)*86400  # 86400 seconds in a day
    line2[52:63] = "{0:.8f}".format(n).rjust(11)
    line2 = "".join(line2)
    tle = [line1, line2]
    return tle
# if __name__ == "__main__":
# line1 = "1 88888U 80275.98708465 .00073094 13844-3 66816-4 0 8"
# line2 = "2 88888 72.8435 115.9689 0086731 52.6988 110.5714 16.05824518 105"
#
# # using compute_necessary_tle()
# obj = SGP4()
# obj.compute_necessary_tle(line1,line2)
# state_vec = obj.propagate(0, 28800)
#
# # using compute_necessary_kep()
# ele = [6641.785974865588, 72.8538850731544, 115.96228572568285, \
# 0.009668565050958889, 59.42251148052069, 104.89188402366825]
# obj.compute_necessary_kep(ele)
# state_vec = obj.propagate(0, 28800)
#
# # Recover TLE from state vector
# pos = [state_vec[0][0], state_vec[0][1], state_vec[0][2]]
# vel = [state_vec[0][3], state_vec[0][4], state_vec[0][5]]
# tle = obj.recover_tle(pos, vel)
#
# del(obj)
|
aerospaceresearch/orbitdeterminator
|
orbitdeterminator/propagation/sgp4.py
|
Python
|
mit
| 15,629
|
[
"EPW"
] |
08c8979e57f815e1f729963823eb70e68b9b2a0d9c4ecba1122eff64f26e53ab
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
"""
Basis Pursuit DeNoising
=======================
This example demonstrates the use of classes :class:`.admm.bpdn.BPDN` and :class:`.pgm.bpdn.BPDN` to solve the Basis Pursuit DeNoising (BPDN) problem :cite:`chen-1998-atomic`
$$\mathrm{argmin}_\mathbf{x} \; (1/2) \| D \mathbf{x} - \mathbf{s} \|_2^2 + \lambda \| \mathbf{x} \|_1 \;,$$
where $D$ is the dictionary, $\mathbf{x}$ is the sparse representation, and $\mathbf{s}$ is the signal to be represented. In this example the BPDN problem is used to estimate the reference sparse representation that generated a signal from a noisy version of the signal.
"""
from __future__ import print_function
from builtins import input
import numpy as np
import sporco.admm.bpdn as abpdn
import sporco.pgm.bpdn as pbpdn
from sporco.pgm.backtrack import BacktrackRobust
from sporco import plot
"""
Configure problem size, sparsity, and noise level.
"""
# Problem dimensions for the synthetic BPDN comparison.
N = 512 # Signal size
M = 4*N # Dictionary size
L = 32 # Number of non-zero coefficients in generator
sigma = 0.5 # Noise level
"""
Construct random dictionary, reference random sparse representation, and test signal consisting of the synthesis of the reference sparse representation with additive Gaussian noise.
"""
# Construct random dictionary and random sparse coefficients
np.random.seed(12345)
D = np.random.randn(N, M)
x0 = np.zeros((M, 1))
si = np.random.permutation(list(range(0, M-1)))
x0[si[0:L]] = np.random.randn(L, 1)
# Construct reference and noisy signal
s0 = D.dot(x0)
s = s0 + sigma*np.random.randn(N,1)
"""
Set regularisation parameter.
"""
lmbda = 2.98e1
"""
Set options for ADMM solver.
"""
opt_admm = abpdn.BPDN.Options({'Verbose': False, 'MaxMainIter': 500,
                               'RelStopTol': 1e-3, 'AutoRho': {'RsdlTarget': 1.0}})
"""
Initialise and run ADMM solver object.
"""
ba = abpdn.BPDN(D, s, lmbda, opt_admm)
xa = ba.solve()
print("ADMM BPDN solve time: %.2fs" % ba.timer.elapsed('solve'))
"""
Set options for PGM solver.
"""
opt_pgm = pbpdn.BPDN.Options({'Verbose': True, 'MaxMainIter': 50, 'L': 9e2,
                              'Backtrack': BacktrackRobust()})
"""
Initialise and run PGM solver.
"""
bp = pbpdn.BPDN(D, s, lmbda, opt_pgm)
xp = bp.solve()
print("PGM BPDN solve time: %.2fs" % bp.timer.elapsed('solve'))
"""
Plot comparison of reference and recovered representations.
"""
plot.plot(np.hstack((x0, xa, xp)), alpha=0.5, title='Sparse representation',
          lgnd=['Reference', 'Reconstructed (ADMM)',
                'Reconstructed (PGM)'])
"""
Plot functional value, residual, and L
"""
# Per-iteration statistics from both solvers.
itsa = ba.getitstat()
itsp = bp.getitstat()
fig = plot.figure(figsize=(21, 7))
plot.subplot(1, 3, 1)
plot.plot(itsa.ObjFun, xlbl='Iterations', ylbl='Functional', fig=fig)
plot.plot(itsp.ObjFun, xlbl='Iterations', ylbl='Functional',
          lgnd=['ADMM', 'PGM'], fig=fig)
plot.subplot(1, 3, 2)
plot.plot(itsa.PrimalRsdl, ptyp='semilogy', xlbl='Iterations', ylbl='Residual',
          fig=fig)
plot.plot(itsa.DualRsdl, ptyp='semilogy', fig=fig)
plot.plot(itsp.Rsdl, ptyp='semilogy', lgnd=['Primal Residual (ADMM)',
          'Dual Residual (ADMM)','Residual (PGM)'], fig=fig)
plot.subplot(1, 3, 3)
plot.plot(itsa.Rho, xlbl='Iterations', ylbl='Algorithm Parameter', fig=fig)
plot.plot(itsp.L, lgnd=[r'$\rho$ (ADMM)', '$L$ (PGM)'], fig=fig)
fig.show()
# Wait for enter on keyboard
input()
|
bwohlberg/sporco
|
examples/scripts/sc/bpdn_cmp.py
|
Python
|
bsd-3-clause
| 3,586
|
[
"Gaussian"
] |
4bb6976f2cd2f501fc02bc92b915163de52da2fc8c4ef295e5795d9462383a62
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def test_duccsd():
    """Test projective factorized UCCSDT on Ne using RHF/cc-pVDZ orbitals"""
    import pytest
    import forte.proc.scc as scc
    import forte
    import psi4

    ref_energy = -128.677997285129

    # RHF reference for the Ne atom in a cc-pVDZ basis.
    scf_energy, psi4_wfn = forte.utils.psi4_scf("Ne", basis='cc-pVDZ', reference='RHF')
    forte_objs = forte.utils.prepare_forte_objects(
        psi4_wfn, mo_spaces={'FROZEN_DOCC': [1, 0, 0, 0, 0, 0, 0, 0]}
    )

    # Run the disentangled UCC with up to double excitations.
    calc_data = scc.run_cc(
        forte_objs['as_ints'],
        forte_objs['scf_info'],
        forte_objs['mo_space_info'],
        cc_type='ducc',
        max_exc=2,
        e_convergence=1.0e-10,
    )
    psi4.core.clean()

    # The converged energy is the second entry of the last iteration record.
    energy = calc_data[-1][1]
    print(f' HF energy: {scf_energy}')
    print(f' DUCCSD energy: {energy}')
    print(f' E - Eref: {energy - ref_energy}')
    assert energy == pytest.approx(ref_energy, 1.0e-11)


if __name__ == "__main__":
    test_duccsd()
|
evangelistalab/forte
|
tests/pytest/sparse_ci/srcc/test_duccsd.py
|
Python
|
lgpl-3.0
| 988
|
[
"Psi4"
] |
170b29d4a0d9fa0a8d4a1e7467522afc01bbeaca17ad872de3c936c2c063a854
|
# Vector dimensionality, sub-ensemble size, neurons per element, RNG seed.
D=16
subdim=4
N=100
seed=7
import nef.nef_theano as nef
import nef.convolution
import hrr
import math
import random
random.seed(seed)
# HRR vocabulary with near-orthogonal vectors.
vocab=hrr.Vocabulary(D,max_similarity=0.1)
net=nef.Network('Question Answering') #Create the network object
net.make('A',1,D,mode='direct') #Make some pseudo populations (so they
                                #run well on less powerful machines):
                                #1 neuron, 16 dimensions, direct mode
net.make('B',1,D,mode='direct')
net.make_array('C',N,D/subdim,dimensions=subdim,quick=True,radius=1.0/math.sqrt(D))
                                #Make a real population, with 100 neurons per
                                #array element and D/subdim elements in the array
                                #each with subdim dimensions, set the radius as
                                #appropriate for multiplying things of this
                                #dimension
net.make('E',1,D,mode='direct')
net.make('F',1,D,mode='direct')
# C = A (*) B binds the statement; F = C (/) E unbinds the query.
conv1=nef.convolution.make_convolution(net,'*','A','B','C',N,
          quick=True) #Make a convolution network using the construct populations
conv2=nef.convolution.make_convolution(net,'/','C','E','F',N,
          invert_second=True,quick=True) #Make a 'correlation' network (by using
                                         #convolution, but inverting the second
                                         #input)
CIRCLE=vocab.parse('CIRCLE').v #Add elements to the vocabulary to use
BLUE=vocab.parse('BLUE').v
RED=vocab.parse('RED').v
SQUARE=vocab.parse('SQUARE').v
ZERO=[0]*D
# Create the inputs
# A alternates RED/BLUE every 0.5 s.
inputA={}
inputA[0.0]=RED
inputA[0.5]=BLUE
inputA[1.0]=RED
inputA[1.5]=BLUE
inputA[2.0]=RED
inputA[2.5]=BLUE
inputA[3.0]=RED
inputA[3.5]=BLUE
inputA[4.0]=RED
inputA[4.5]=BLUE
net.make_input('inputA',inputA)
net.connect('inputA','A')
# B alternates CIRCLE/SQUARE in step with A, forming RED(*)CIRCLE then
# BLUE(*)SQUARE statements.
inputB={}
inputB[0.0]=CIRCLE
inputB[0.5]=SQUARE
inputB[1.0]=CIRCLE
inputB[1.5]=SQUARE
inputB[2.0]=CIRCLE
inputB[2.5]=SQUARE
inputB[3.0]=CIRCLE
inputB[3.5]=SQUARE
inputB[4.0]=CIRCLE
inputB[4.5]=SQUARE
net.make_input('inputB',inputB)
net.connect('inputB','B')
# E presents the probe cues (interleaved with ZERO rest periods); the
# answer appears on F.
inputE={}
inputE[0.0]=ZERO
inputE[0.2]=CIRCLE
inputE[0.35]=RED
inputE[0.5]=ZERO
inputE[0.7]=SQUARE
inputE[0.85]=BLUE
inputE[1.0]=ZERO
inputE[1.2]=CIRCLE
inputE[1.35]=RED
inputE[1.5]=ZERO
inputE[1.7]=SQUARE
inputE[1.85]=BLUE
inputE[2.0]=ZERO
inputE[2.2]=CIRCLE
inputE[2.35]=RED
inputE[2.5]=ZERO
inputE[2.7]=SQUARE
inputE[2.85]=BLUE
inputE[3.0]=ZERO
inputE[3.2]=CIRCLE
inputE[3.35]=RED
inputE[3.5]=ZERO
inputE[3.7]=SQUARE
inputE[3.85]=BLUE
inputE[4.0]=ZERO
inputE[4.2]=CIRCLE
inputE[4.35]=RED
inputE[4.5]=ZERO
inputE[4.7]=SQUARE
inputE[4.85]=BLUE
net.make_input('inputE',inputE)
net.connect('inputE','E')
net.add_to_nengo()
|
jaberg/nengo
|
examples/question.py
|
Python
|
mit
| 2,681
|
[
"NEURON"
] |
8fcc4e707435b86c039c497f2622c3d6f7cf3cfcce6f931f06073f5899f00fb3
|
#!/usr/bin/env python
########################################################################
# File : dirac-wms-job-get-jdl
# Author : Stuart Paterson
########################################################################
"""
Retrieve the current JDL of a DIRAC job
Usage:
dirac-wms-job-get-jdl [options] ... JobID ...
Arguments:
JobID: DIRAC Job ID
Example:
$ dirac-wms-job-get-jdl 1
{'Arguments': '-ltrA',
'CPUTime': '86400',
'DIRACSetup': 'EELA-Production',
'Executable': '/bin/ls',
'JobID': '1',
'JobName': 'DIRAC_vhamar_602138',
'JobRequirements': '[OwnerDN = /O=GRID-FR/C=FR/O=CNRS/OU=CPPM/CN=Vanessa Hamar;
OwnerGroup = eela_user;
Setup = EELA-Production;
UserPriority = 1;
CPUTime = 0 ]',
'OutputSandbox': ['std.out', 'std.err'],
'Owner': 'vhamar',
'OwnerDN': '/O=GRID-FR/C=FR/O=CNRS/OU=CPPM/CN=Vanessa Hamar',
'OwnerGroup': 'eela_user',
'OwnerName': 'vhamar',
'Priority': '1'}
"""
import DIRAC
from DIRAC.Core.Utilities.DIRACScript import DIRACScript as Script
@Script()
def main():
    """Print the JDL of each DIRAC job given on the command line."""
    Script.registerSwitch("O", "Original", "Gets the original JDL")
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(["JobID: DIRAC Job ID"])
    sws, args = Script.parseCommandLine(ignoreErrors=True)

    # Was the original (as-submitted) JDL requested?
    original = any(sw[0] in ("Original", "O") for sw in sws)

    from DIRAC.Interfaces.API.Dirac import Dirac, parseArguments

    dirac = Dirac()
    exitCode = 0
    errorList = []
    for job in parseArguments(args):
        result = dirac.getJobJDL(job, original=original, printOutput=True)
        if not result["OK"]:
            errorList.append((job, result["Message"]))
            exitCode = 2

    for error in errorList:
        print("ERROR %s: %s" % error)

    DIRAC.exit(exitCode)


if __name__ == "__main__":
    main()
|
ic-hep/DIRAC
|
src/DIRAC/Interfaces/scripts/dirac_wms_job_get_jdl.py
|
Python
|
gpl-3.0
| 2,025
|
[
"DIRAC"
] |
8c2599dfdd8783af36c3d7fde241a01102489bbb983e3e7ca4dd0d7663e4ab95
|
from __future__ import unicode_literals
import numpy as np
from astropy.io import ascii, fits
import matplotlib as mp
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import pandas as pd
import pylab as py
import scipy as sp
from scipy import optimize
import string as s
from statsmodels import robust
import statsmodels as stat
import os
import glob
#latex for matplotlib
#os.environ['PATH'] = os.environ['PATH'] + ':/usr/texbin'
from matplotlib import rc
mp.rcParams['text.usetex']=True
mp.rcParams['text.latex.unicode']=True
rc('text', usetex=True)
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
#turn off some pandas warnings
pd.options.mode.chained_assignment = None
def rest_colours_df(U_spec, V_spec, J_spec, string):
    """Build a rest-frame colour DataFrame indexed by galaxy id.

    Args:
        U_spec, V_spec, J_spec: tables with an 'id' column and rest-frame
            fluxes in columns 'L153' (U), 'L155' (V) and 'L161' (J).
        string (str): suffix appended to the output column names
            (e.g. '_rest' gives columns 'U_rest', 'V_rest', 'J_rest').

    Returns:
        pandas.DataFrame: columns 'U'+string, 'V'+string, 'J'+string,
        indexed by galaxy id.
    """
    U = pd.Series(data=U_spec['L153'], index=U_spec['id'])
    V = pd.Series(data=V_spec['L155'], index=V_spec['id'])
    J = pd.Series(data=J_spec['L161'], index=J_spec['id'])
    rest_colours = pd.DataFrame(U, columns=['U'+string])
    rest_colours['V'+string] = V
    # BUG FIX: the J column was previously assigned the V fluxes, which
    # made every downstream V-J colour identically zero.
    rest_colours['J'+string] = J
    return rest_colours
def prepare_cosmos():
    """Load and merge the COSMOS-field catalogues into analysis DataFrames.

    Combines the ZFIRE master table with the ZFOURGE photometric catalogue,
    EAZY/FAST outputs, rest-frame UVJ colours (spec-z based where available,
    photo-z based otherwise), UV+IR SFRs, and the MOSDEF and VUDS
    cross-matched samples.

    Returns:
        tuple: (COSMOS, MOSDEF_ZFOURGE, VUDS_ZFOURGE, VUDS_extra) DataFrames.
    """
    COSMOS_mastertable, ZF_cat, ZF_EAZY, ZF_FAST, U_spec, V_spec, J_spec,\
    U_photo, V_photo, J_photo, UV_lee, VJ_lee, UV_IR_SFRs, MOSDEF_ZFOURGE,\
    U_ZM,V_ZM, J_ZM, VUDS_ZFOURGE, VUDS_extra, U_ZV, V_ZV, J_ZV = open_cosmos_files()
    # Convert the astropy tables to DataFrames indexed by galaxy id.
    ZF = pd.DataFrame(np.asarray(ZF_cat), index=ZF_cat['id'])
    ZF_EAZY = pd.DataFrame(np.asarray(ZF_EAZY), index=ZF_EAZY['id'])
    ZF_FAST = pd.DataFrame(np.asarray(ZF_FAST), index=ZF_FAST['id'])
    # Spec-z based rest-frame fluxes, merged onto the ZFOURGE catalogue.
    rest_colours_spec = rest_colours_df(U_spec, V_spec, J_spec, '_rest')
    ZFOURGE = pd.merge(ZF, rest_colours_spec,how='left', left_index=True, right_index=True )
    ZFOURGE.set_index(ZFOURGE.id, inplace=True)
    # Photo-z based rest-frame fluxes kept in separate columns.
    rest_colours_photo = rest_colours_df(U_photo, V_photo, J_photo, '_rest')
    ZFOURGE['U_rest_photo'] = rest_colours_photo['U_rest']
    ZFOURGE['V_rest_photo'] = rest_colours_photo['V_rest']
    ZFOURGE['J_rest_photo'] = rest_colours_photo['J_rest']
    # Lee's (older EAZY) colours, already expressed as magnitudes.
    UV_lee = pd.Series(data=-2.5*np.log10(UV_lee['L153']/UV_lee['L155']), index=UV_lee['id'])
    VJ_lee = pd.Series(data=-2.5*np.log10(VJ_lee['L155']/VJ_lee['L161']), index=VJ_lee['id'])
    rest_colours_photo = pd.DataFrame(UV_lee,columns=['UV_rest_Lee'] )
    rest_colours_photo['VJ_rest_Lee'] = VJ_lee
    ZFOURGE['UV_rest_Lee'] = rest_colours_photo['UV_rest_Lee']
    ZFOURGE['VJ_rest_Lee'] = rest_colours_photo['VJ_rest_Lee']
    # Prefer spec-z fluxes; fall back to photo-z values where missing.
    ZFOURGE['U_rest'] = ZFOURGE['U_rest'].fillna(ZFOURGE['U_rest_photo'])
    ZFOURGE['V_rest'] = ZFOURGE['V_rest'].fillna(ZFOURGE['V_rest_photo'])
    ZFOURGE['J_rest'] = ZFOURGE['J_rest'].fillna(ZFOURGE['J_rest_photo'])
    # Rest-frame colours in magnitudes.
    ZFOURGE['UV'] = -2.5*np.log10(ZFOURGE['U_rest']/ZFOURGE['V_rest'])
    ZFOURGE['VJ'] = -2.5*np.log10(ZFOURGE['V_rest']/ZFOURGE['J_rest'])
    ZFOURGE['UV_photo'] = -2.5*np.log10(ZFOURGE['U_rest_photo']/ZFOURGE['V_rest_photo'])
    ZFOURGE['VJ_photo'] = -2.5*np.log10(ZFOURGE['V_rest_photo']/ZFOURGE['J_rest_photo'])
    UV_IR_SFRs = pd.DataFrame(np.asarray(UV_IR_SFRs), index=UV_IR_SFRs['id'])
    # NOTE(review): 'sfr_luv' is added to itself here; given the UV+IR name,
    # one term was presumably meant to be the IR SFR (e.g. 'sfr_lir') —
    # confirm against the SFR catalogue columns.
    UV_IR_SFRs['UV_IR_SFRs'] = UV_IR_SFRs['sfr_luv'] + UV_IR_SFRs['sfr_luv']
    ZFOURGE['firflag'] = UV_IR_SFRs['firflag']
    ZFOURGE['UV_IR_SFRs'] = UV_IR_SFRs['UV_IR_SFRs']
    ZFOURGE = pd.merge(ZFOURGE, ZF_EAZY,how='inner',left_on='id', right_on='id', left_index=True )
    ZFOURGE = pd.merge(ZFOURGE, ZF_FAST,how='inner',left_on='id', right_on='id', left_index=True )
    ZFOURGE.index = ZFOURGE.index.astype('str')
    # Attach the ZFOURGE columns to the ZFIRE master table.
    COSMOS = pd.merge(COSMOS_mastertable, ZFOURGE, how='left', left_index=True,right_index=True,
                      suffixes=('_ZFIRE', '_ZFOURGE'))
    # print COSMOS
    # COSMOS = COSMOS.drop('id', 1)
    # COSMOS = COSMOS.set_index('Nameobj')
    # Total Ks magnitude (ZFOURGE zeropoint 25) and its error.
    COSMOS['Ks_mag'] = 25-2.5*np.log10(COSMOS['Kstot'])
    COSMOS['Ks_mag_err'] = 2.5*0.434*COSMOS['eKstot']/COSMOS['Kstot']
    COSMOS['Nameobj'] = COSMOS.index
    # spec_z = ZFIRE[(ZFIRE['conf']>1)]
    # spec_z = pd.merge(ZFOURGE, spec_z, how='left', left_index=True, right_on='Nameobj', suffixes=('_ZFOURGE', '_ZFIRE'))
    # spec_z = spec_z.set_index(spec_z['id'])
    # spec_z['redshifts'] = spec_z['zspec']
    # spec_z['redshifts'] = spec_z['redshifts'].fillna(spec_z['z_peak'])
    # spec_z['masses'] = spec_z['lmass_ZFIRE']
    # spec_z['masses'] = spec_z['masses'].fillna(spec_z['lmass_ZFOURGE'])
    # spec_z['Av'] = spec_z['Av_ZFIRE']
    # spec_z['Av'] = spec_z['Av'].fillna(spec_z['Av_ZFOURGE'])
    # MOSDEF cross-match: keep detections only, attach FAST and UVJ colours.
    MOSDEF_ZFOURGE = MOSDEF_ZFOURGE[MOSDEF_ZFOURGE['Z_MOSFIRE']>0] #remove non-detections from MOSDEF
    MOSDEF_ZFOURGE = pd.DataFrame(np.asarray(MOSDEF_ZFOURGE), index=MOSDEF_ZFOURGE['id'])
    MOSDEF_ZFOURGE = pd.merge(MOSDEF_ZFOURGE, ZF_FAST, how='inner', left_index=True, right_index=True)
    MOSDEF_ZFOURGE.index = MOSDEF_ZFOURGE.index.astype('str')
    rest_colours_spec = rest_colours_df(U_ZM, V_ZM, J_ZM, '_rest')
    rest_colours_spec.index = rest_colours_spec.index.astype('str')
    MOSDEF_ZFOURGE = pd.merge(MOSDEF_ZFOURGE, rest_colours_spec,how='left', left_index=True, right_index=True )
    MOSDEF_ZFOURGE['UV'] = -2.5*np.log10(MOSDEF_ZFOURGE['U_rest']/MOSDEF_ZFOURGE['V_rest'])
    MOSDEF_ZFOURGE['VJ'] = -2.5*np.log10(MOSDEF_ZFOURGE['V_rest']/MOSDEF_ZFOURGE['J_rest'])
    # VUDS cross-match: keep secure redshift flags, attach extra VUDS info
    # and UVJ colours.
    VUDS_ZFOURGE = VUDS_ZFOURGE[(VUDS_ZFOURGE['zflags']==3) | (VUDS_ZFOURGE['zflags']==4) ] #select only >3-sigma detections
    VUDS_ZFOURGE = pd.DataFrame(np.asarray(VUDS_ZFOURGE), index=VUDS_ZFOURGE['id'])
    #do not delete this. Since mass information is given by VUDS this is used in a plot
    #rather than the ZFOURGE matched sample. The ZFOURGE matched sample is used for UVJ colours etc.
    VUDS_extra = VUDS_extra['vuds_ident','age','log_stellar_mass','log_star_formation_rate','k','ek', 'zflags', 'z_spec']
    VUDS_extra = pd.DataFrame(np.asarray(VUDS_extra), index=VUDS_extra['vuds_ident'])
    VUDS_ZFOURGE = pd.merge(VUDS_ZFOURGE, VUDS_extra, left_on='vuds_ident', right_on='vuds_ident', how='inner')
    VUDS_ZFOURGE.set_index('id', inplace=True)
    VUDS_ZFOURGE.index = VUDS_ZFOURGE.index.astype('str')
    rest_colours_spec = rest_colours_df(U_ZV, V_ZV, J_ZV, '_rest')
    rest_colours_spec.index = rest_colours_spec.index.astype('str')
    VUDS_ZFOURGE = pd.merge(VUDS_ZFOURGE, rest_colours_spec,how='left', left_index=True, right_index=True )
    VUDS_ZFOURGE['UV'] = -2.5*np.log10(VUDS_ZFOURGE['U_rest']/VUDS_ZFOURGE['V_rest'])
    VUDS_ZFOURGE['VJ'] = -2.5*np.log10(VUDS_ZFOURGE['V_rest']/VUDS_ZFOURGE['J_rest'])
    return COSMOS, MOSDEF_ZFOURGE, VUDS_ZFOURGE, VUDS_extra
def open_cosmos_files():
    """
    This function opens files related to the COSMOS field.
    Returns:
        A lot of stuff. Check the code to see what it returns
        (master table, ZFOURGE catalogue/EAZY/FAST outputs, rest-frame
        flux tables for several cross-matched samples, SFRs, and the
        MOSDEF/VUDS cross-match tables).
    """
    COSMOS_mastertable = pd.read_csv('data/zfire/zfire_cosmos_master_table_dr1.1.csv',index_col='Nameobj')
    ZF_cat = ascii.read('data/zfourge/spitler2014/cosmos.v0.10.7.a.cat')
    ZF_EAZY = ascii.read('data/zfourge/spitler2014/cosmos.v0.10.7.a.zout')
    ZF_FAST = ascii.read('data/zfourge/spitler2014/cosmos.v0.10.7.a.fout')
    #load in colours using spec-z
    #only ZFIRE
    U_spec = ascii.read('data/zfourge/uvj/specz_zfire/cosmos.v0.10.7.a.153.rf')
    V_spec = ascii.read('data/zfourge/uvj/specz_zfire/cosmos.v0.10.7.a.155.rf')
    J_spec = ascii.read('data/zfourge/uvj/specz_zfire/cosmos.v0.10.7.a.161.rf')
    #load in colours using photo-z
    U_photo = ascii.read('data/zfourge/uvj/photoz/cosmos.v0.10.7.a.153.rf')
    V_photo = ascii.read('data/zfourge/uvj/photoz/cosmos.v0.10.7.a.155.rf')
    J_photo = ascii.read('data/zfourge/uvj/photoz/cosmos.v0.10.7.a.161.rf')
    #galaxy colours derived by Lee's catalogue
    #This uses the older EAZY method of fitting colours
    UV_lee = ascii.read('data/zfourge/spitler2014/cosmos.v0.10.7.a.153-155.rf')
    VJ_lee = ascii.read('data/zfourge/spitler2014/cosmos.v0.10.7.a.155-161.rf')
    UV_IR_SFRs = ascii.read('data/zfourge/sfrs/cosmos.sfr.v0.5.cat')
    MOSDEF_ZFOURGE = ascii.read('data/catalogue_crossmatch/MOSDEF_COSMOS.dat')
    #ZFIRE and MOSDEF colours
    U_ZM = ascii.read('data/zfourge/uvj/specz_zfire_mosdef/cosmos.v0.10.7.a.153.rf')
    V_ZM = ascii.read('data/zfourge/uvj/specz_zfire_mosdef/cosmos.v0.10.7.a.155.rf')
    J_ZM = ascii.read('data/zfourge/uvj/specz_zfire_mosdef/cosmos.v0.10.7.a.161.rf')
    VUDS_ZFOURGE = ascii.read('data/catalogue_crossmatch/VUDS_COSMOS.dat')
    VUDS_extra = ascii.read('data/vuds/cesam_vuds_spectra_dr1_cosmos_catalog_additional_info.txt')
    #ZFIRE and VUDS colours
    U_ZV = ascii.read('data/zfourge/uvj/specz_vuds/cosmos.v0.10.7.a.153.rf')
    V_ZV = ascii.read('data/zfourge/uvj/specz_vuds/cosmos.v0.10.7.a.155.rf')
    J_ZV = ascii.read('data/zfourge/uvj/specz_vuds/cosmos.v0.10.7.a.161.rf')
    return COSMOS_mastertable, ZF_cat, ZF_EAZY, ZF_FAST, U_spec, V_spec, J_spec,\
    U_photo, V_photo, J_photo, UV_lee, VJ_lee, UV_IR_SFRs, MOSDEF_ZFOURGE,\
    U_ZM,V_ZM, J_ZM, VUDS_ZFOURGE, VUDS_extra, U_ZV, V_ZV, J_ZV
def prepare_uds():
    """
    This function will load data for the UDS field
    Returns:
        1. ZFIRE UDS catalogue with extra info from UKIDSS photometry.
        2. UKIDSS photometry data for all galaxies in UDS field within the z=1.62 cluster region.
    """
    UDS, UDS_photometry, all_UKIDSS, all_UKIDSS_photo_z, all_UKIDSS_U, all_UKIDSS_V, all_UKIDSS_J = open_uds_files()
    # Attach the UKIDSS photometry to the ZFIRE UDS table (match on Keck id).
    UDS_photometry = pd.DataFrame(np.asarray(UDS_photometry), index=UDS_photometry['Keckid'])
    UDS_photometry.index = UDS_photometry.index.astype(str)
    UDS = pd.merge(UDS, UDS_photometry, left_index=True, right_index=True, how='left')
    # Build rest-frame U, V, J fluxes and UVJ colours for the full UKIDSS sample.
    all_UKIDSS = pd.DataFrame(np.asarray(all_UKIDSS), index=all_UKIDSS['id'])
    all_UKIDSS_photo_z = pd.DataFrame(np.asarray(all_UKIDSS_photo_z), index=all_UKIDSS_photo_z['id'])
    all_UKIDSS_U = pd.Series(np.asarray(all_UKIDSS_U['L13']), index=all_UKIDSS_U['id'])
    all_UKIDSS_V = pd.Series(np.asarray(all_UKIDSS_V['L15']), index=all_UKIDSS_V['id'])
    all_UKIDSS_J = pd.Series(np.asarray(all_UKIDSS_J['L21']), index=all_UKIDSS_J['id'])
    all_UKIDSS_photo_z['U'] = all_UKIDSS_U
    all_UKIDSS_photo_z['V'] = all_UKIDSS_V
    all_UKIDSS_photo_z['J'] = all_UKIDSS_J
    all_UKIDSS_photo_z['UV'] = -2.5*np.log10(all_UKIDSS_photo_z['U']/all_UKIDSS_photo_z['V'])
    all_UKIDSS_photo_z['VJ'] = -2.5*np.log10(all_UKIDSS_photo_z['V']/all_UKIDSS_photo_z['J'])
    all_UKIDSS =pd.merge(all_UKIDSS,all_UKIDSS_photo_z, left_index=True, right_index=True,how='inner' )
    UDS = pd.merge(UDS,all_UKIDSS , left_on='DR8id', right_index=True, how='left' )
    UDS = UDS[~UDS.index.duplicated()]
    #take params for the cluster
    BCG_RA, BCG_DEC, z, dz = uds_clus_param()
    # Select UKIDSS galaxies inside the cluster box in RA/DEC and within
    # +/- dz of the cluster redshift.
    # NOTE(review): the RA half-width is 0.167*15 deg while the DEC
    # half-width is 0.167 deg — looks like an hours-to-degrees conversion;
    # confirm the intended box size.
    UKIDSS_selected = all_UKIDSS[(all_UKIDSS.ra>(BCG_RA-(0.167*15))) & (all_UKIDSS.ra<(BCG_RA+(0.167*15))) & (all_UKIDSS.dec>(BCG_DEC-0.167)) &
                                 (all_UKIDSS.dec<(BCG_DEC+0.167)) & (all_UKIDSS.z_peak>(z-dz)) &(all_UKIDSS.z_peak<(z+dz))]
    return UDS, UKIDSS_selected
def open_uds_files():
    """
    This function opens files related to the UDS field.
    Returns:
        zfire catalogue for uds field: UDS
        basic UKIDSS photometry: UDS_photometry, all_UKIDSS, all_UKIDSS_photo_z
        UVJ info: all_UKIDSS_U, all_UKIDSS_V, all_UKIDSS_J
    """
    #zfire UDS master catalogue
    UDS = pd.read_csv('data/zfire/zfire_uds_master_table_dr1.1.csv', index_col='Nameobj')
    #don't ask me why there are so many different files with information repeated.
    #its the beauty of a collaboration
    #uds photometry data: provided by Kim-Vy Tran
    UDS_photometry = ascii.read('data/ukidss/photometry/keckz-mags.txt')
    #rest-frame colour info: Provided by Ryan Quadri
    all_UKIDSS = ascii.read('data/ukidss/uvj/uds8_v0.2.test.cat')
    all_UKIDSS_photo_z = ascii.read('data/ukidss/uvj/udsz.zout')
    # Rest-frame U (filter 13), V (15) and J (21) fluxes.
    all_UKIDSS_U = ascii.read('data/ukidss/uvj/udsz.13.rf')
    all_UKIDSS_V = ascii.read('data/ukidss/uvj/udsz.15.rf')
    all_UKIDSS_J = ascii.read('data/ukidss/uvj/udsz.21.rf')
    return UDS, UDS_photometry, all_UKIDSS, all_UKIDSS_photo_z, all_UKIDSS_U, all_UKIDSS_V, all_UKIDSS_J
def uds_clus_param():
    """
    Define co-ordinates for the Papovich(2010) cluster.
    Returns:
        Centre RA, DEC, redshift, and the size of redshift bin
    """
    # Centre in decimal degrees (RA is given in hours, hence the factor 15).
    centre_ra = 2.30597 * 15
    centre_dec = -5.172194
    # Cluster redshift and the half-width of the redshift selection window.
    redshift, z_half_width = 1.62, 0.05
    return centre_ra, centre_dec, redshift, z_half_width
#Exposure Times(in seconds)
# Total integration time per MOSFIRE mask; looked up by mask name in
# open_fits_COSMOS() below.
Hbandmask1 = 19920 ; Hbandmask2 = 11520
DeepKband1 = 7560 ; DeepKband2 = 7200
KbandLargeArea3 = 11880 ; KbandLargeArea4 = 10260
shallowmask1 = 7200 ; shallowmask2 = 7200
shallowmask3 = 7200 ; shallowmask4 = 3960
UDS_Y1 = 20340 ;
UDS_H1 = 5640 ; UDS_J1 = 3360
UDS_H2 = 6720 ; UDS_J2 = 2880
UDS_H3 = 2880 ; UDS_J3 = 2880
def open_fits_COSMOS(galaxy, band ):
    """
    v2.1.1: commented the emission line fits opening since it is not needed for the
    purpose of p(z) stacks
    version 2.1:
    added to open the emission line fit files
    upgrading to open multiple object spectra correctly.
    version 1.3 21/11/14
    upgrade to take spectra from the master table. The file open order should be:
    1. the common folder to priorotize the objects observed in multiple observing runs
    2. objects in individual observing runs
    For H band option 1 doesn't apply
    Multiobject option has been removed.

    Returns (eps, mask, ET, obs_run): the opened FITS HDU list, the mask
    name, its exposure time and the observing run, or (-99, -99, -99) if
    the H-band file is not found.
    """
    # Resolve the object ID and filename suffix for multi-slit objects:
    # a 'b' in galaxy.doubles selects the second extraction ('-2'),
    # a 'c' the third ('-3').
    if pd.isnull(galaxy.doubles) is True:
        suffix_string = ''
        ID = galaxy.Nameobj
    else:
        double_string = galaxy.doubles
        if s.find(double_string, 'b')!=-1:
            suffix_string = '-2'
            ID = s.rstrip(galaxy.doubles, 'b')
        elif s.find(double_string, 'c')!=-1:
            suffix_string = '-3'
            ID = s.rstrip(galaxy.doubles, 'c')
    if band =='H':
        path='../../spectra/spectra_1d/2014feb_1d/after_scaling/spectra/'
        try:
            fits = glob.glob(str(path)+'Hbandmask*'+ str(ID) +'_*_1D'+suffix_string+'.fits');# print fits; print path
            # NOTE(review): 'pf' is not among this file's visible imports
            # (astropy.io.fits is imported as 'fits') — confirm the pf alias
            # exists elsewhere in the module.
            Name = fits[0]; eps = pf.open(Name)
        except IndexError:
            # NOTE(review): 'Object' is undefined in this scope; presumably
            # this should reference galaxy.Nameobj — confirm.
            print str(Object['Nameobj'])+" Object not found in H band"
            return -99, -99, -99
        print 'Opened ' + str(Name)
        if s.find(Name, 'Hbandmask1')!=-1:
            mask = 'Hbandmask1'; ET = Hbandmask1; print 'This is----> ' + str(mask)
        elif s.find(Name, 'Hbandmask2')!=-1:
            mask = 'Hbandmask2'; ET = Hbandmask2; print 'This is----> ' + str(mask)
        else:
            mask= 'unknown' ; print '**ERROR** mask not recognized: Please Check'
        obs_run= 'feb'
    elif band=='K':
        # Prefer the coadded common-folder spectrum; fall back to the
        # per-run spectra if no coadd exists.
        try:
            path='../../spectra/spectra_1d/common_1d/DR1/after_scaling_common_1D/'
            fits = glob.glob(str(path)+'*_'+str(ID)+'_coadd1D.fits')
            Name = fits[0]; eps = pf.open(Name); print "opened "+ str(Name)
            assert len(fits)==1, "There are 0/multiple matches"
        except IndexError:
            path='../../spectra/spectra_1d/201*_1d/after_scaling/spectra/'
            fits = glob.glob(str(path)+'*_K_K_*_'+ str(ID) +'_*_1D'+suffix_string+'.fits')
            # NOTE(review): in this fallback branch 'Name' and 'eps' are never
            # assigned before being used below — likely missing
            # Name = fits[0]; eps = pf.open(Name). Confirm.
            assert len(fits)==1, "There are 0/multiple matches"
        if s.find(Name, 'DeepKband1')!=-1:
            mask = 'DeepKband1' ; ET = DeepKband1 ; obs_run= 'feb'; print 'This is ' + str(mask); mask='DK1'
        elif s.find(Name, 'DeepKband2')!=-1:
            mask = 'DeepKband2'; ET = DeepKband2 ; obs_run= 'feb'; print 'This is ' + str(mask); mask='DK2'
        elif s.find(Name, 'KbandLargeArea3')!=-1:
            mask = 'KbandLargeArea3'; ET = KbandLargeArea3 ; obs_run= 'feb'; print 'This is ' + str(mask); mask='KL3'
        elif s.find(Name, 'KbandLargeArea4')!=-1:
            mask = 'KbandLargeArea4'; ET = KbandLargeArea4 ; obs_run= 'feb';print 'This is ' + str(mask); mask='KL4'
        elif s.find(Name, 'shallowmask1')!=-1:
            mask = 'shallowmask1' ; ET = shallowmask1; obs_run= 'dec'; print 'This is ' + str(mask); mask='SK1'
        elif s.find(Name, 'shallowmask2')!=-1:
            mask = 'shallowmask2'; ET = shallowmask2 ; obs_run= 'dec'; print 'This is ' + str(mask); mask='SK2'
        elif s.find(Name, 'shallowmask3')!=-1:
            mask = 'shallowmask3'; ET = shallowmask3; obs_run= 'dec'; print 'This is ' + str(mask); mask='SK3'
        elif s.find(Name, 'shallowmask4')!=-1:
            mask = 'shallowmask4'; ET = shallowmask4 ; obs_run= 'dec'; print 'This is ' + str(mask); mask='SK4'
        else:
            mask= 'COM' ; ET = -100 ; obs_run= 'decfeb'; print 'Object in both observing runs'
    return eps, mask, ET, obs_run
def set_sky_weights(band, wave, w):
    """Zero out spectral weights that land on night-sky emission lines.

    Parameters
    ----------
    band : str
        Photometric band: 'H', 'K', 'J' or 'Y'.  Selects the sky-line
        list (module-level tables) and the spectral resolution used as
        the masking half-width in Angstrom.
    wave : array_like
        Wavelength values of the spectrum.
    w : ndarray
        Weight array, modified in place: entries within +/- spec_res of
        any sky line are set to 0, everything else is left as given.

    Returns
    -------
    ndarray
        The (in-place modified) weight array ``w``.

    Raises
    ------
    ValueError
        If ``band`` is not recognised.  (The original code printed a
        message and then crashed with a NameError on the loop below.)
    """
    if band == 'H':
        sky_lines, spec_res = sky_H['wavelength'], 4.5
    elif band == 'K':
        sky_lines, spec_res = sky_K['wavelength'], 5.5
    elif band == 'J':
        sky_lines, spec_res = sky_J['wavelength'], 4.0
    elif band == 'Y':
        sky_lines, spec_res = sky_Y['wavelength'], 3.5  # not checked
    else:
        raise ValueError("Unknown band: %r" % (band,))
    for line in sky_lines:
        # masked_outside() masks values OUTSIDE the window, so the mask
        # array is False exactly where wave falls on the sky line.
        wave_mask = np.ma.masked_outside(wave, line - spec_res, line + spec_res)
        masked_array = np.ma.getmaskarray(wave_mask)
        np.place(w, masked_array == False, [0])
    return w
def get_limits(galaxy):
    """Determine the usable wavelength range of a galaxy's K-band spectrum.

    Returns (limit_low, limit_upper, fraction_lost) where the limits are
    redshift bounds derived from 6565 A (rest-frame H-alpha, presumably --
    TODO confirm) and fraction_lost is the fraction of pixels zero-weighted
    by sky-line masking.

    NOTE(review): this function is shadowed by the later definition of
    ``get_limits(hdr, low_x, high_x)`` below; code running after that
    point sees the other function.
    """
    data = open_fits_COSMOS(galaxy, 'K')
    scidata, sddata, wavelength, hdr = data[0].data, data[1].data, data[2].data, data[0].header
    CRVAL1, CD1_1, CRPIX1 = hdr['CRVAL1'], hdr['CD1_1'], hdr['CRPIX1']
    i_w = np.arange(len(scidata)) + 1  # i_w should start from 1
    # Linear wavelength solution rebuilt from the FITS WCS keywords.
    wavelength = ((i_w - CRPIX1) * CD1_1) + CRVAL1
    limits = np.nonzero(sddata)  # masking procedure is fine. Checked 1/09/14
    # Keep only the span where the error spectrum is non-zero.
    photometry_mask = np.ma.masked_inside(wavelength, wavelength[limits[0][0]], wavelength[limits[0][-1]], copy=True)
    photometry_mask = np.ma.getmaskarray(photometry_mask)
    Lambda_limits = wavelength[photometry_mask]
    flux_limits = scidata[photometry_mask]
    error_limits = sddata[photometry_mask]
    # Down-weight pixels sitting on K-band sky lines.
    sky_weights = np.ones_like(Lambda_limits)
    sky_weights = set_sky_weights('K', Lambda_limits, sky_weights)
    print sky_weights[0:20]
    fraction_lost = float(len(Lambda_limits[sky_weights == 0])) / len(Lambda_limits)
    print "fraction lost due to sky = ", fraction_lost
    # Convert the wavelength bounds to redshift bounds: z = lambda/6565 - 1.
    limit_low = (Lambda_limits[0] / 6565) - 1
    limit_upper = (Lambda_limits[-1] / 6565) - 1
    #make_spectra_plot(Lambda_limits,flux_limits,error_limits,galaxy.zspec, galaxy.Nameobj)
    return limit_low, limit_upper, fraction_lost
def get_lambda(scidata, hdr):
    """Build the wavelength axis for a spectrum from FITS WCS keywords.

    Parameters
    ----------
    scidata : sequence
        Science data array; only its length is used.
    hdr : mapping
        Header providing 'CRVAL1' (reference value), 'CD1_1' (dispersion)
        and 'CRPIX1' (reference pixel).

    Returns
    -------
    ndarray
        Wavelength for each pixel, pixels counted from 1.
    """
    ref_value = hdr['CRVAL1']
    dispersion = hdr['CD1_1']
    ref_pixel = hdr['CRPIX1']
    pixels = np.arange(1, len(scidata) + 1)
    return (pixels - ref_pixel) * dispersion + ref_value
def get_limits(hdr, low_x, high_x):
    """Convert a wavelength interval into pixel indices via the FITS WCS.

    NOTE(review): this redefinition shadows ``get_limits(galaxy)`` above.

    Parameters
    ----------
    hdr : mapping
        Header providing 'CRVAL1', 'CD1_1' and 'CRPIX1'.
    low_x, high_x : float
        Wavelength bounds (same units as CRVAL1 / CD1_1).

    Returns
    -------
    (int, int)
        Truncated pixel coordinates corresponding to low_x and high_x.
    """
    CRVAL1, CD1_1, CRPIX1 = hdr['CRVAL1'], hdr['CD1_1'], hdr['CRPIX1']
    # Invert the linear WCS: pix = (lambda - CRVAL1) / CD1_1 + CRPIX1.
    # int() replaces np.int, which was removed in NumPy 1.24.
    pix_low = int(((low_x - CRVAL1) / CD1_1) + CRPIX1)
    pix_high = int(((high_x - CRVAL1) / CD1_1) + CRPIX1)
    return pix_low, pix_high
def make_subplots_1D(ax, flux_1D, error_1D, wavelength, xlim, z, Name, Band, conf):
    """Plot a 1D spectrum with its error into *ax* and annotate emission lines.

    Parameters
    ----------
    ax : matplotlib axes to draw into (the current pyplot figure is also used)
    flux_1D, error_1D : flux and error arrays (units of 1e-17 erg/s/cm^2/A per the y label)
    wavelength : wavelength axis in Angstrom
    xlim : (low, high) wavelength window to display
    z : redshift used to place the rest-frame line markers at (1+z)*lambda_rest
    Name : object identifier as a string; several IDs get hand-tuned y limits below
    Band : band label used in the in-plot annotation
    conf : redshift quality flag Q_z shown in the annotation
    """
    ax.step(wavelength, flux_1D, linewidth=1.0, ls='-',
            color='b', alpha=1.0, label='$\mathrm{Flux}$')
    ax.step(wavelength, error_1D, linewidth=0.5, ls='-',
            color='r', alpha=1.0, label='$\mathrm{Error}$')
    ax.fill_between(wavelength, flux_1D-error_1D, flux_1D+error_1D, linewidth=0,
                    facecolor='cyan', interpolate=True, edgecolor='white')
    # Emission-line markers; skipped for three specific objects
    # (presumably they lack secure line identifications -- TODO confirm).
    if (Name != '9593') and (Name != '7547') and (Name != '5155'):
        plt.axvline(x=(z+1)*5008.240, ls='--', c='k')
        ax.text(((z+1)*5008.240)+5,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[OIII]}$')
        plt.axvline(x=(z+1)*4960.295, ls='--', c='k')
        ax.text(((z+1)*4960.295)+5,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[OIII]}$')
        plt.axvline(x=(z+1)*3728.000, ls='--', c='k')
        ax.text(((z+1)*3728.000)+5,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[OII]}$')
        plt.axvline(x=(z+1)*4862.680, ls='--', c='k')
        ax.text(((z+1)*4862.680)+5,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                r'$\mathrm{H\beta}$')
        plt.axvline(x=(z+1)*6564.610, ls='--', c='k')
        ax.text(((z+1)*6564.610)+5,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                r'$\mathrm{H\alpha}$')
        plt.axvline(x=(z+1)*6585.270, ls='--', c='k')
        ax.text(((z+1)*6585.270)+20,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[NII]}$')
        plt.axvline(x=(z+1)*6549.860, ls='--', c='k')
        ax.text(((z+1)*6549.860)-120,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[NII]}$')
        plt.axvline(x=(z+1)*6718.290, ls='--', c='k')
        ax.text(((z+1)*6718.290)-120,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[SII]}$')
        plt.axvline(x=(z+1)*6732.670, ls='--', c='k')
        ax.text(((z+1)*6732.670)+10,
                np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.85,
                '$\mathrm{[SII]}$')
    plt.ylabel(r'$\mathrm{Flux\ (10^{-17}ergs/s/cm^2/\AA)}$', fontsize=10)
    plt.xlim(xlim[0], xlim[1])
    # Hand-tuned y limits and label placement for a handful of objects;
    # everything else gets limits derived from the data in the window.
    if Name == '3633':
        plt.ylim(-0.2, 0.78)
        ax.text(xlim[1]-350, 0.6,
                ('$\mathrm{'+str(Name)+'\ '+str(Band)+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$'))
    elif Name == '9593':
        plt.ylim(-0.2, 0.2)
        ax.text(xlim[1]-350, 0.10,
                ('$\mathrm{'+str(Name)+'\ '+str(Band)+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$'))
    elif Name == '3883':
        plt.ylim(-0.1, 0.2)
        ax.text(xlim[1]-330, 0.12,
                ('$\mathrm{'+str(Name)+'\ '+str(Band)+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$'))
    elif Name == '7547':
        plt.ylim(-0.2, 0.4)
        ax.text(xlim[1]-350, -0.2,
                ('$\mathrm{'+str(Name)+'\ '+str(Band)+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$'))
    elif Name == '5155':
        plt.ylim(-0.2, 0.38)
        ax.text(xlim[1]-350, 0.2,
                ('$\mathrm{'+str(Name)+'\ '+str(Band)+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$'))
    else:
        plt.ylim(np.min(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.95,
                 np.max(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*1.05)
        if Band == 'H\ band':
            ax.text(xlim[1]-250,
                    np.max(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.60,
                    ('$\mathrm{'+str(Name)+'\ '+str(Band))+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$')
        else:
            ax.text(xlim[1]-350,
                    np.max(flux_1D[(wavelength > xlim[0]) & (wavelength < xlim[1])])*0.75,
                    ('$\mathrm{'+str(Name)+'\ '+str(Band))+'}$'+'\n'+'$\mathrm{'+'Q_z='+str(conf)+'}$')
    # We change the fontsize of minor ticks label
    plt.tick_params(axis='both', which='major', labelsize=10)
    #plt.tick_params(axis='both', which='minor', labelsize=15)
def make_subplots_2D(spectra_2D, xlim, xlabel=False):
    """Display a 2D spectrum cut to the *xlim* wavelength window as an image.

    NOTE(review): relies on a free variable ``hdr`` (not a parameter) for
    the WCS keywords passed to get_limits -- confirm the caller defines it
    at module scope before calling this.
    """
    pix_limit_low, pix_limit_high = get_limits(hdr, xlim[0], xlim[1])
    # Trim the dispersion axis to the displayed window.
    spectra_2D = spectra_2D[:, pix_limit_low: pix_limit_high]
    plt.imshow(spectra_2D, aspect='auto', cmap='gist_gray',
               extent=(xlim[0], xlim[1], 40, 0), vmin=-1e-19, vmax=9e-20)
    # Hide y ticks: the spatial axis carries no meaningful scale here.
    plt.gca().yaxis.set_major_locator(plt.NullLocator())
    #extent = left, right, bottom, top
    plt.tick_params(axis='both', which='major', labelsize=10)
    if xlabel == True:
        plt.xlabel(r'$\mathrm{Wavelength\ (\AA)}$', fontsize=12)
    #vmin=-1e-19, vmax=9e-20, cmap='gray', aspect=1,interpolation='none'
|
themiyan/zfire_survey
|
zfire_utils.py
|
Python
|
mit
| 25,333
|
[
"Galaxy"
] |
e1e079476336a3ab124432f43e0f02559e8e0a583cbd468123323f90beb58723
|
# This is a module for Server Manager testing using Python.
# This provides several utility functions useful for testing
import os
import re
import sys
import exceptions
from vtkPVServerManagerDefaultPython import *
# we get different behavior based on how we import servermanager
# so we want to import servermanager the same way in this module
# as we do in any module that is importing this
# Mirror the caller's import path: if paraview.simple has already been
# imported, import servermanager through it so this module shares the
# same servermanager state as the importing test.
SMModuleName = 'paraview.servermanager'
if 'paraview.simple' in sys.modules:  # `in` replaces dict.has_key (removed in Python 3)
    SMModuleName = 'paraview.simple'
sm = __import__(SMModuleName)
servermanager = sm.servermanager
class TestError(Exception):
    """Raised when a servermanager regression test cannot be carried out.

    Inherits from the builtin Exception directly; the Python-2-only
    ``exceptions`` module exposed the very same class object.
    """
    pass
# Guard so ProcessCommandLineArguments() parses sys.argv only once.
__ProcessedCommandLineArguments__ = False
# Values filled in from the command line by ProcessCommandLineArguments():
DataDir = ""        # -D: directory substituted for ${DataDir} in state files
TempDir = ""        # -T: directory for temporary test output
BaselineImage = ""  # -V: baseline image for regression comparison
Threshold = 10.0    # --threshold: allowed image-difference threshold
SMStatesDir = ""    # -S: directory containing servermanager state files
StateXMLFileName = ""  # --state: state file to load in __main__
UseSavedStateForRegressionTests = False  # --use_saved_state: round-trip state before testing
def Error(message):
    """Print *message* as an error and return False (used as a status value)."""
    # Parenthesized form prints identically on Python 2 and works on Python 3.
    print("ERROR: %s" % message)
    return False
def ProcessCommandLineArguments():
    """Processes the command line areguments.

    Fills in the module-level globals (DataDir, TempDir, ...) from
    sys.argv.  Idempotent: subsequent calls return immediately.
    """
    global DataDir
    global TempDir
    global BaselineImage
    global Threshold
    global StateXMLFileName
    global UseSavedStateForRegressionTests
    global SMStatesDir
    global __ProcessedCommandLineArguments__
    if __ProcessedCommandLineArguments__:
        return
    __ProcessedCommandLineArguments__ = True
    length = len(sys.argv)
    index = 1
    # Walk argv as (key, value) pairs: key = argv[index-1], value = argv[index].
    # Options that consume a value advance by 2; the value-less flag
    # "--use_saved_state" and unrecognized tokens step back one slot so the
    # scan effectively advances by 1.
    while index < length:
        key = sys.argv[index-1]
        value = sys.argv[index]
        index += 2
        if key == "-D":
            DataDir = value
        elif key == "-V":
            BaselineImage = value
        elif key == "-T":
            TempDir = value
        elif key == "-S":
            SMStatesDir = value
        elif key == "--threshold":
            Threshold = float(value)
        elif key == "--state":
            StateXMLFileName = value
        elif key == "--use_saved_state":
            UseSavedStateForRegressionTests = True
            index -= 1
        else:
            index -= 1
    return
def LoadServerManagerState(filename):
    """This function loads the servermanager state xml/pvsm.
    Returns the status of the load (True on success, False otherwise)."""
    global DataDir
    ProcessCommandLineArguments()
    parser = servermanager.vtkPVXMLParser()
    try:
        fp = open(filename, "r")
        data = fp.read()
        fp.close()
    except:
        # NOTE(review): bare except also swallows unrelated errors;
        # an IOError/OSError is what is actually expected here.
        return Error("Failed to open state file %s" % filename)
    # Substitute the ${DataDir} placeholder with the -D command-line value.
    regExp = re.compile("\${DataDir}")
    data = regExp.sub(DataDir, data)
    if not parser.Parse(data):
        return Error("Failed to parse")
    loader = servermanager.vtkSMStateLoader()
    loader.SetSession(servermanager.ActiveConnection.Session)
    root = parser.GetRootElement()
    if loader.LoadState(root):
        # Force all registered proxies to push their state to the server.
        pxm = servermanager.vtkSMProxyManager.GetProxyManager().GetActiveSessionProxyManager()
        pxm.UpdateRegisteredProxiesInOrder(0);
        pxm.UpdateRegisteredProxies(0)
        return True
    return Error("Failed to load state file %s" % filename)
def DoRegressionTesting(rmProxy=None):
    """Perform image-based regression testing against the baseline image.

    Renders *rmProxy* (or the active render view when None) and compares
    the result with the baseline supplied on the command line (-V),
    within the configured threshold.

    Returns
    -------
    bool
        True when the regression test passes; the (False) result of
        Error() when the images differ.

    Raises
    ------
    TestError
        If no render view can be located.
    """
    global TempDir
    global BaselineImage
    global Threshold
    ProcessCommandLineArguments()
    testing = vtkSMTesting()
    testing.AddArgument("-T")
    testing.AddArgument(TempDir)
    testing.AddArgument("-V")
    testing.AddArgument(BaselineImage)
    if not rmProxy:
        rmProxy = servermanager.GetRenderView()
        if rmProxy:
            rmProxy = rmProxy.SMProxy
    if not rmProxy:
        # The original `raise "..."` used a string exception, which is a
        # TypeError on Python >= 2.6; raise this module's exception type.
        raise TestError("Failed to locate view to perform regression testing.")
    rmProxy.StillRender()
    testing.SetRenderViewProxy(rmProxy)
    if testing.RegressionTest(Threshold) == 1:
        return True
    return Error("Regression Test Failed!")
if __name__ == "__main__":
    # This script loads the state, saves out a temp state and loads the saved state.
    # This saved state is used for testing -- this will ensure load/save SM state
    # is working fine.
    servermanager.Connect()
    ProcessCommandLineArguments()
    # ret stays 1 (failure) unless the regression test passes.
    ret = 1
    if StateXMLFileName:
        if LoadServerManagerState(StateXMLFileName):
            pxm = servermanager.vtkSMProxyManager.GetProxyManager().GetActiveSessionProxyManager()
            if UseSavedStateForRegressionTests:
                # Round-trip the state through a save/load cycle so the
                # regression test also exercises state serialization.
                saved_state = os.path.join(TempDir, "temp.pvsm")
                pxm.SaveState(saved_state)
                pxm.UnRegisterProxies();
                LoadServerManagerState(saved_state)
                try:
                    os.remove(saved_state)
                except:
                    # Best-effort cleanup of the temporary state file.
                    pass
            if DoRegressionTesting():
                ret = 0
    else:
        Error("No ServerManager state file specified")
    if ret:
        # This leads to vtkDebugLeaks reporting leaks, hence we do this
        # only when the tests failed.
        sys.exit(ret)
|
HopeFOAM/HopeFOAM
|
ThirdParty-0.1/ParaView-5.0.1/Wrapping/Python/paraview/smtesting.py
|
Python
|
gpl-3.0
| 4,598
|
[
"ParaView"
] |
163e5d7e336b4e35b82979fbfe4e58a7418edee376262e8fb44946eec9b8cfc0
|
# -*- coding: utf-8 -*-
"""
Module for animating solutions in 1D.
Can also save them but requieres ffmpeg package
see save_animation method.
"""
import numpy as nm
from os.path import join as pjoin
from glob import glob
from matplotlib import animation
from matplotlib import pyplot as plt
from matplotlib import colors
from sfepy.discrete.fem.meshio import MeshioLibIO
from sfepy.discrete.fem.mesh import Mesh
# This would still use some refactoring so it is more flexible
# Module attribution tag kept from the original author.
__author__ = 'tomas_zitka'
# Path to the ffmpeg binary; must be set before calling save_animation().
ffmpeg_path = ''  # for saving animations
def head(l):
    """
    Maybe get head of the list.

    Parameters
    ----------
    l : indexable

    Returns
    -------
    head : first element in l, or None when l is empty (falsy)
    """
    return l[0] if l else None
def animate1D_dgsol(Y, X, T, ax=None, fig=None, ylims=None, labs=None,
                    plott=None, delay=None):
    """Animates solution of 1D problem into current figure.
    Keep reference to returned animation object otherwise
    it is discarded

    Parameters
    ----------
    Y :
        solution, array |T| x |X| x n, where n is dimension of the solution
    X :
        space interval discetization
    T :
        time interval discretization
    ax :
        specify axes to plot to (Default value = None)
    fig :
        specifiy figure to plot to (Default value = None)
    ylims :
        limits for y axis, default are 10% offsets of Y extremes
    labs :
        labels to use for parts of the solution (Default value = None)
    plott :
        plot type - how to plot data: tested plot, step (Default value = None)
    delay :
        frame interval in ms; computed from the time span when None

    Returns
    -------
    anim
        the animation object, keep it to see the animation, used for savig too
    """
    ax, fig, time_text = setup_axis(X, Y, ax, fig, ylims)
    if not isinstance(Y, nm.ndarray):
        # Allow a list of per-component arrays; stack into |T| x |X| x n.
        Y = nm.stack(Y, axis=2)
    lines = setup_lines(ax, Y.shape, labs, plott)

    def animate(i):
        # Per-frame callback: update the timer text and push frame i's
        # data into the pre-created artists.
        ax.legend()
        time_text.set_text("t= {0:3.2f} / {1:3.3}".format(T[i], T[-1]))
        # from sfepy.base.base import debug;
        # debug()
        if len(Y.shape) > 2:
            for ln, l in enumerate(lines):
                l.set_data(X, Y[i].swapaxes(0, 1)[ln])
            return tuple(lines) + (time_text,)
            # https://stackoverflow.com/questions/20624408/matplotlib-animating-multiple-lines-and-text
        else:
            lines.set_data(X, Y[i])
            return lines, time_text

    if delay is None:
        # Default: scale the frame interval to the simulated time span.
        delay = int(nm.round(2000 * (T[-1] - T[0]) / len(T)))
    anim = animation.FuncAnimation(fig, animate, frames=len(T), interval=delay,
                                   blit=True, repeat=True, repeat_delay=250)
    return anim
def setup_axis(X, Y, ax=None, fig=None, ylims=None):
    """Setup axis, including timer for animation or snaps

    Parameters
    ----------
    X :
        space disctretization to get limits
    Y :
        solution to get limits
    ax :
        ax where to put everything, if None current axes are used (Default value = None)
    fig :
        fig where to put everything, if None current figure is used (Default value = None)
    ylims :
        custom ylims, if None y axis limits are calculated from Y (Default value = None)

    Returns
    -------
    ax
    fig
    time_text
        object to fill in text
    """
    if ax is None:
        fig = plt.gcf()
        ax = plt.gca()
    if ylims is None:
        # 10% offsets relative to the extreme values themselves; note the
        # offset direction depends on the sign of the extremum.
        lowery = nm.min(Y) - nm.min(Y) / 10
        uppery = nm.max(Y) + nm.max(Y) / 10
    else:
        lowery = ylims[0]
        uppery = ylims[1]
    ax.set_ylim(lowery, uppery)
    ax.set_xlim(X[0], X[-1])
    # Timer label placed near the top-left corner of the data window.
    time_text = ax.text(X[0] + nm.sign(X[0]) * X[0] / 10,
                        uppery - uppery / 10,
                        'empty', fontsize=15)
    return ax, fig, time_text
def setup_lines(ax, Yshape, labs, plott):
    """Sets up artist for animation or solution snaps

    Parameters
    ----------
    ax :
        axes to use for artist
    Yshape : tuple
        shape of the solution array
    labs : list
        labels for the solution
    plott : str ("steps" or "plot")
        type of plot to use

    Returns
    -------
    lines
        a list of artists for vector-valued solutions, a single artist
        otherwise
    """
    if plott is None:
        plott = ax.plot
    else:
        # Resolve the plot method by name, e.g. "step" -> ax.step.
        plott = ax.__getattribute__(plott)
    if len(Yshape) > 2:
        # Vector-valued solution: one artist per component.
        lines = [plott([], [], lw=2)[0] for foo in range(Yshape[2])]
        for i, l in enumerate(lines):
            if labs is None:
                l.set_label("q" + str(i + 1) + "(x, t)")
            else:
                l.set_label(labs[i])
    else:
        # Scalar solution: a single artist.
        lines, = plott([], [], lw=2)
        if labs is None:
            lines.set_label("q(x, t)")
        else:
            lines.set_label(labs)
    return lines
def save_animation(anim, filename):
    """Saves animation as .mp4, requires ffmeg package

    Parameters
    ----------
    anim :
        animation object
    filename :
        name of the file, without the .mp4 ending
    """
    # The module-level ffmpeg_path must point at the ffmpeg binary.
    plt.rcParams['animation.ffmpeg_path'] = ffmpeg_path
    writer = animation.FFMpegWriter(fps=24)
    anim.save(filename + ".mp4", writer=writer)
def sol_frame(Y, X, T, t0=.5, ax=None, fig=None, ylims=None, labs=None, plott=None):
    """Creates snap of solution at specified time frame t0, basically gets one
    frame from animate1D_dgsol, but colors wont be the same :-(

    Parameters
    ----------
    Y :
        solution, array |T| x |X| x n, where n is dimension of the solution
    X :
        space interval discetization
    T :
        time interval discretization
    t0 :
        time to take snap at (Default value = .5)
    ax :
        specify axes to plot to (Default value = None)
    fig :
        specifiy figure to plot to (Default value = None)
    ylims :
        limits for y axis, default are 10% offsets of Y extremes
    labs :
        labels to use for parts of the solution (Default value = None)
    plott :
        plot type - how to plot data: tested plot, step (Default value = None)

    Returns
    -------
    fig
    """
    ax, fig, time_text = setup_axis(X, Y, ax, fig, ylims)
    if not isinstance(Y, nm.ndarray):
        Y = nm.stack(Y, axis=2)
    lines = setup_lines(ax, Y.shape, labs, plott)
    # Index of the time sample closest to the requested t0.
    nt0 = nm.abs(T - t0).argmin()
    ax.legend()
    time_text.set_text("t= {0:3.2f} / {1:3.3}".format(T[nt0], T[-1]))
    if len(Y.shape) > 2:
        for ln, l in enumerate(lines):
            l.set_data(X, Y[nt0].swapaxes(0, 1)[ln])
    else:
        lines.set_data(X, Y[nt0])
    return fig
def save_sol_snap(Y, X, T, t0=.5, filename=None, name=None,
                  ylims=None, labs=None, plott=None):
    """Wrapper for sol_frame, saves the frame to file specified.

    Parameters
    ----------
    name :
        name of the solution e.g. name of the solver used (Default value = None)
    filename :
        name of the file, overrides automatic generation (Default value = None)
    Y :
        solution, array |T| x |X| x n, where n is dimension of the solution
    X :
        space interval discetization
    T :
        time interval discretization
    t0 :
        time to take snap at (Default value = .5)
    ylims :
        limits for y axis, default are 10% offsets of Y extremes
    labs :
        labels to use for parts of the solution (Default value = None)
    plott :
        plot type - how to plot data: tested plot, step (Default value = None)

    Returns
    -------
    fig
    """
    if name is None:
        name = "unknown_solver"
    if filename is None:
        # Generate a name only when the caller did not supply one.  The
        # original code regenerated (and clobbered) an explicit filename
        # whenever `name` was None.
        filename = "{0}_solsnap{1:3.2f}-{2:3.3}".format(name, t0, T[-1]).replace(".", "_")
    filename = pjoin("semestralka", "figs", filename)
    fig = plt.figure(filename)
    # Forward the caller's plott choice (the original hard-coded None here,
    # silently ignoring the parameter).
    snap1 = sol_frame(Y, X, T, t0=t0, ylims=ylims, labs=labs, plott=plott)
    # Overlay the initial condition for reference.
    if not isinstance(Y, nm.ndarray):
        plt.plot(X, Y[0][0], label="q(x, 0)")
    else:
        if len(Y.shape) > 2:
            plt.plot(X, Y[0, :, 0], label="q(x, 0)")
        else:
            plt.plot(X, Y[0, :], label="q(x, 0)")
    plt.legend()
    snap1.savefig(filename)
    return fig
def plotsXT(Y1, Y2, YE, extent, lab1=None, lab2=None, lab3=None):
    """Plots Y1 and Y2 to one axes and YE to the second axes,
    Y1 and Y2 are presumed to be two solutions and YE their error

    Parameters
    ----------
    Y1 :
        solution 1, shape = (space nodes, time nodes)
    Y2 :
        solution 2, shape = (space nodes, time nodes)
    YE :
        soulutio 1 - soulution 2||
    extent :
        imshow extent
    lab1 :
        (Default value = None)
    lab2 :
        (Default value = None)
    lab3 :
        (Default value = None)
    """
    # >> Plot contours
    # NOTE(review): on older matplotlib set_bad() mutates the globally
    # registered colormap object -- confirm this side effect is acceptable.
    cmap1 = plt.cm.get_cmap("bwr")
    cmap1.set_bad('white')
    # cmap2 = plt.cm.get_cmap("BrBG")
    # cmap2.set_bad('white')
    bounds = nm.arange(-1, 1, .05)
    norm1 = colors.BoundaryNorm(bounds, cmap1.N)
    # norm2 = colors.BoundaryNorm(bounds, cmap2.N)
    fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, sharey=True)
    fig.suptitle("X-T plane plot")
    if lab1 is not None:
        ax1.set(title=lab1)
    c1 = ax1.imshow(Y1, extent=extent,
                    cmap=cmap1, norm=norm1,
                    interpolation='none',
                    origin='lower')
    ax1.grid()
    if lab2 is not None:
        ax2.set(title=lab2)
    c2 = ax2.imshow(Y2, extent=extent,
                    cmap=cmap1, norm=norm1,
                    interpolation='none',
                    origin='lower')
    ax2.grid()
    if lab3 is not None:
        ax3.set(title=lab3)
    c3 = ax3.imshow(YE, extent=extent,
                    cmap="bwr", norm=norm1,
                    interpolation='none',
                    origin='lower')
    ax3.grid()
    # One shared colorbar spanning all three panels.
    fig.colorbar(c3, ax=[ax1, ax2, ax3])
def load_state_1D_vtk(name):
    """Load one VTK file containing state in time

    Parameters
    ----------
    name : str
        path of the .vtk file

    Returns
    -------
    coors : ndarray
        nodal coordinates, shape (n_nod, 1)
    u : ndarray
        modal DOFs, shape (order, n_cell, 1, 1); returns None (implicitly)
        when the file contains no modal data
    """
    from sfepy.discrete.fem.meshio import MeshioLibIO
    io = MeshioLibIO(name)
    coors = io.read(Mesh()).coors[:, 0, None]
    data = io.read_data(step=0)
    # Find the first "<var>_modal<i>" key, and bail out BEFORE stripping
    # the index digit: the original sliced head(...)[:-1] first, which
    # raised TypeError on None when no modal key was present.
    var_name = head([k for k in data.keys() if "_modal" in k])
    if var_name is None:
        print("File {} does not contain modal data.".format(name))
        return
    var_name = var_name[:-1]
    porder = len([k for k in data.keys() if var_name in k])
    u = nm.zeros((porder, coors.shape[0] - 1, 1, 1))
    for ii in range(porder):
        u[ii, :, 0, 0] = data[var_name + '{}'.format(ii)].data
    return coors, u
def load_1D_vtks(fold, name):
    """Reads series of .vtk files and crunches them into form
    suitable for plot10_DG_sol.

    Attempts to read modal cell data for variable mod_data. i.e.
    ``?_modal{i}``, where i is number of modal DOF

    Resulting solution data have shape:
    ``(order, nspace_steps, ntime_steps, 1)``

    Parameters
    ----------
    fold :
        folder where to look for files
    name :
        used in ``{name}.i.vtk, i = 0,1, ... tns - 1``

    Returns
    -------
    coors : ndarray
    mod_data : ndarray
        solution data
    """
    files = glob(pjoin(fold, name) + ".[0-9]*")
    if len(files) == 0:  # no multiple time steps, try loading single file
        print("No files {} found in {}".format(pjoin(fold, name) + ".[0-9]*", fold))
        print("Trying {}".format(pjoin(fold, name) + ".vtk"))
        files = glob(pjoin(fold, name) + ".vtk")
        if files:
            return load_state_1D_vtk(files[0])
        else:
            print("Nothing found.")
            return
    io = MeshioLibIO(files[0])
    coors = io.read(Mesh()).coors[:, 0, None]
    data = io.read_data(step=0)
    # Check for modal data BEFORE stripping the trailing DOF digit; the
    # original sliced head(...)[:-1] first and raised TypeError on None
    # when the file contained no "_modal" keys.
    var_name = head([k for k in data.keys() if "_modal" in k])
    if var_name is None:
        print("File {} does not contain modal data.".format(files[0]))
        return
    var_name = var_name[:-1]
    porder = len([k for k in data.keys() if var_name in k])
    tn = len(files)
    # Time-step indices encoded in the second-to-last dotted filename field.
    nts = sorted([int(f.split(".")[-2]) for f in files])
    digs = len(files[0].split(".")[-2])
    full_name_form = ".".join((pjoin(fold, name), ("{:0" + str(digs) + "d}"), "vtk"))
    mod_data = nm.zeros((porder, coors.shape[0] - 1, tn, 1))
    for i, nt in enumerate(nts):
        io = MeshioLibIO(full_name_form.format(nt))
        # parameter "step" does nothing, but is obligatory
        data = io.read_data(step=0)
        for ii in range(porder):
            mod_data[ii, :, i, 0] = data[var_name + '{}'.format(ii)].data
    return coors, mod_data
def animate_1D_DG_sol(coors, t0, t1, u,
                      tn=None, dt=None,
                      ic=lambda x: 0.0, exact=lambda x, t: 0,
                      delay=None, polar=False):
    """Animates solution to 1D problem produced by DG:
    1. animates DOF values in elements as steps
    2. animates reconstructed solution with discontinuities

    Parameters
    ----------
    coors :
        coordinates of the mesh
    t0 : float
        starting time
    t1 : float
        final time
    u :
        vectors of DOFs, for each order one, shape(u) = (order, nspace_steps, ntime_steps, 1)
    ic :
        analytical initial condition, optional (Default value = lambda x: 0.0)
    tn :
        number of time steps to plot, starting at 0, if None and dt is not None run animation through
        all time steps, spaced dt within [t0, tn] (Default value = None)
    dt :
        time step size, if None and tn is not None computed as (t1- t0) / tn otherwise set to 1
        if dt and tn are both None, t0 and t1 are ignored and solution is animated as if in time 0 ... ntime_steps (Default value = None)
    exact :
        analytical exact solution of (x, t), overlaid when not None
    delay :
        frame interval passed through to animate1D_dgsol (Default value = None)
    polar :
        plot on polar axes, space interval mapped to [0, 2*pi) (Default value = False)

    Returns
    -------
    anim_dofs : animation object of DOFs,
    anim_recon : animation object of reconstructed solution
    """
    # Setup space coordinates
    XN = coors[-1]
    X1 = coors[0]
    Xvol = XN - X1
    X = (coors[1:] + coors[:-1]) / 2
    XS = nm.linspace(X1, XN, 500)[:, None]
    if polar:  # setup polar coorinates
        # NOTE(review): mutates the caller's coors array in place.
        coors *= 2*nm.pi
        X *= 2*nm.pi
        XS *= 2*nm.pi
    # Setup times
    if tn is not None and dt is not None:
        T = nm.array(nm.cumsum(nm.ones(tn) * dt))
    elif tn is not None:
        T, dt = nm.linspace(t0, t1, tn, retstep=True)
    elif dt is not None:
        tn = int(nm.ceil(float(t1 - t0) / dt))
        T = nm.linspace(t0, t1, tn)
    else:
        T = nm.arange(nm.shape(u)[2])
    n_nod = len(coors)
    n_el_nod = nm.shape(u)[0]
    # prepend u[:, 0, ...] to all time frames for plotting step in left corner
    u_step = nm.append(u[:, 0:1, :, 0], u[:, :, :, 0], axis=1)
    # Plot DOFs directly
    figs = plt.figure()
    if polar:
        axs = plt.subplot(111, projection='polar')
        axs.set_theta_direction('clockwise')
    else:
        axs = plt.subplot(111)
    # Plot mesh
    axs.vlines(coors[:, 0], ymin=0, ymax=.5, colors="grey")
    axs.vlines((X1, XN), ymin=0, ymax=.5, colors="k")
    axs.vlines(X, ymin=0, ymax=.3, colors="grey", linestyles="--")
    axs.plot([X1, XN], [1, 1], 'k')
    # Plot IC and its sampling
    for i in range(n_el_nod):
        c0 = axs.plot(X, u[i, :, 0, 0],
                      label="IC-{}".format(i),
                      marker=".", ls="")[0].get_color()
        # c1 = plt.plot(X, u[1, :, 0, 0], label="IC-1", marker=".", ls="")[0].get_color()
        # # plt.plot(coors, .1*alones(n_nod), marker=".", ls="")
        axs.step(coors[1:], u[i, :, 0, 0], color=c0)
        # plt.step(coors[1:], u[1, :, 0, 0], color=c1)
        # plt.plot(coors[1:], sic[1, :], label="IC-1", color=c1)
    if ic is not None:
        ics = ic(XS)
        axs.plot(nm.squeeze(XS), nm.squeeze(ics), label="IC-ex")
    # Animate sampled solution DOFs directly
    anim_dofs = animate1D_dgsol(u_step.T, coors, T, axs, figs,
                                ylims=[-1, 2],
                                plott="step",
                                delay=delay)
    if not polar:
        axs.set_xlim(coors[0] - .1 * Xvol, coors[-1] + .1 * Xvol)
    axs.legend(loc="upper left")
    axs.set_title("Sampled solution")
    # Plot reconstructed solution
    figr = plt.figure()
    if polar:
        axr = plt.subplot(111, projection='polar')
        axr.set_theta_direction('clockwise')
    else:
        axr = plt.subplot(111)
    # Plot mesh
    axr.vlines(coors[:, 0], ymin=0, ymax=.5, colors="grey")
    axr.vlines((X1, XN), ymin=0, ymax=.5, colors="k")
    axr.vlines(X, ymin=0, ymax=.3, colors="grey", linestyles="--")
    axr.plot([X1, XN], [1, 1], 'k')
    # Plot discontinuously!
    # (order, space_steps, t_steps, 1)
    ww, xx = reconstruct_legendre_dofs(coors, tn, u)
    # plt.vlines(xx, ymin=0, ymax=.3, colors="green")
    # plot reconstructed IC
    axr.plot(xx, ww[:, 0], label="IC")
    # get exact solution values
    if exact is not None:
        exact_vals = exact(xx, T)[..., None]
        labs = ["q{}(x,t)".format(i) for i in range(ww.shape[-1])] + ["exact"]
        ww = nm.concatenate((ww, exact_vals), axis=-1)
    else:
        labs = None
    # Animate reconstructed solution
    anim_recon = animate1D_dgsol(ww.swapaxes(0, 1), xx, T, axr, figr,
                                 ylims=[-1, 2],
                                 labs=labs,
                                 delay=delay)
    if not polar:
        axr.set_xlim(coors[0] - .1 * Xvol, coors[-1] + .1 * Xvol)
    axr.legend(loc="upper left")
    axr.set_title("Reconstructed solution")
    # sol_frame(u[:, :, :, 0].T, nm.append(coors, coors[-1]), T, t0=0., ylims=[-1, 1], plott="step")
    plt.show()
    return anim_dofs, anim_recon
def plot1D_legendre_dofs(coors, dofss, fun=None):
    """Plots values of DOFs as steps

    Parameters
    ----------
    coors :
        coordinates of nodes of the mesh
    dofss :
        iterable of different projections' DOFs into legendre space
    fun :
        analytical function to plot (Default value = None)
    """
    # Cell centers used as x positions for the DOF markers.
    X = (coors[1:] + coors[:-1]) / 2
    plt.figure("DOFs for function fun")
    plt.gcf().clf()
    for ii, dofs in enumerate(dofss):
        for i in range(dofs.shape[1]):
            c0 = plt.plot(X, dofs[:, i], label="fun-{}dof-{}".format(ii, i), marker=".", ls="")[0].get_color()
            # # plt.plot(coors, .1*alones(n_nod), marker=".", ls="")
            # Step plot in the matching color shows the piecewise values.
            plt.step(coors[1:], dofs[:, i], color=c0)
            # plt.plot(coors[1:], sic[1, :], label="IC-1", color=c1)
    if fun is not None:
        xs = nm.linspace(nm.min(coors), nm.max(coors), 500)[:, None]
        plt.plot(xs, fun(xs), label="fun-ex")
    plt.legend()
    # plt.show()
def reconstruct_legendre_dofs(coors, tn, u):
    """Creates solution and coordinates vector which when plotted as

        plot(xx, ww)

    represent solution reconstructed from DOFs in Legendre poly space at
    cell borders.

    Works only as linear interpolation between cell boundary points

    Parameters
    ----------
    coors :
        coors of nodes of the mesh
    tn :
        number of time steps to reconstruct,
        if None all steps are reconstructed
    u :
        vectors of DOFs, for each order one,
        shape(u) = (order, nspace_steps, ntime_steps, 1)

    Returns
    -------
    ww : ndarray
        solution values vector, shape is (3 * nspace_steps - 1, ntime_steps, 1),
    xx : ndarray
        corresponding coordinates vector, shape is (3 * nspace_steps - 1, 1)
    """
    XN = coors[-1]
    X1 = coors[0]
    n_nod = len(coors) - 1
    if tn is None:
        tn = nm.shape(u)[2]
    # NOTE(review): when tn is given it is assumed to equal u.shape[2];
    # the broadcast below fails otherwise -- TODO confirm.
    n_el_nod = nm.shape(u)[0]
    ww = nm.zeros((3 * n_nod - 1, tn, 1))
    # Evaluate the Legendre expansion at the element end points:
    # P_i(-1) = (-1)**i at the left edge, P_i(1) = 1 at the right edge.
    for i in range(n_el_nod):
        ww[0:-1:3] = ww[0:-1:3] + (-1)**i * u[i, :, :]  # left edges of elements
        ww[1::3] = ww[1::3] + u[i, :, :]  # right edges of elements
    # NaNs ensure plotting of discontinuities at element borders.
    # nm.nan replaces nm.NaN, which was removed in NumPy 2.0.
    ww[2::3, :] = nm.nan
    # nodes for plotting reconstructed solution
    xx = nm.zeros((3 * n_nod - 1, 1))
    xx[0] = X1
    xx[-1] = XN
    # the ending is still a bit odd, but hey, it works!
    xx[1:-1] = nm.repeat(coors[1:-1], 3)[:, None]
    return ww, xx
|
vlukes/sfepy
|
sfepy/discrete/dg/dg_1D_vizualizer.py
|
Python
|
bsd-3-clause
| 20,407
|
[
"VTK"
] |
f962c7e34f2a22d8c221b671c52515129a1d85082915f2c74185718ec3314dca
|
"""
Tests for the course home page.
"""
from datetime import datetime, timedelta
from unittest import mock
from urllib.parse import quote_plus
import ddt
from django.conf import settings
from django.http import QueryDict
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.timezone import now
from edx_toggles.toggles.testutils import override_waffle_flag
from pytz import UTC
from waffle.models import Flag
from waffle.testutils import override_flag
from common.djangoapps.course_modes.models import CourseMode
from common.djangoapps.course_modes.tests.factories import CourseModeFactory
from common.djangoapps.student.tests.factories import BetaTesterFactory
from common.djangoapps.student.tests.factories import GlobalStaffFactory
from common.djangoapps.student.tests.factories import InstructorFactory
from common.djangoapps.student.tests.factories import OrgInstructorFactory
from common.djangoapps.student.tests.factories import OrgStaffFactory
from common.djangoapps.student.tests.factories import StaffFactory
from lms.djangoapps.commerce.models import CommerceConfiguration
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.course_goals.api import add_course_goal_deprecated, get_course_goal
from lms.djangoapps.course_home_api.toggles import COURSE_HOME_USE_LEGACY_FRONTEND
from lms.djangoapps.courseware.tests.helpers import get_expiration_banner_text
from lms.djangoapps.discussion.django_comment_client.tests.factories import RoleFactory
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.django_comment_common.models import (
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_MODERATOR
)
from openedx.core.djangoapps.schedules.models import Schedule
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from openedx.core.djangolib.markup import HTML
from openedx.features.course_duration_limits.models import CourseDurationLimitConfig
from openedx.features.course_experience import (
COURSE_ENABLE_UNENROLLED_ACCESS_FLAG,
COURSE_PRE_START_ACCESS_FLAG,
DISABLE_UNIFIED_COURSE_TAB_FLAG,
ENABLE_COURSE_GOALS,
SHOW_UPGRADE_MSG_ON_COURSE_HOME
)
from openedx.features.course_experience.tests import BaseCourseUpdatesTestCase
from openedx.features.course_experience.tests.views.helpers import add_course_mode, remove_course_mode
from common.djangoapps.student.models import CourseEnrollment, FBEEnrollmentExclusion
from common.djangoapps.student.tests.factories import UserFactory
from common.djangoapps.util.date_utils import strftime_localized
from xmodule.course_module import COURSE_VISIBILITY_PRIVATE, COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore import ModuleStoreEnum # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.django_utils import CourseUserType, ModuleStoreTestCase # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls # lint-amnesty, pylint: disable=wrong-import-order
# Credentials / fixture constants shared by the course-home tests below.
TEST_PASSWORD = 'test'
TEST_CHAPTER_NAME = 'Test Chapter'
TEST_COURSE_TOOLS = 'Course Tools'
# HTML fragments asserted against the rendered course-home pages.
TEST_BANNER_CLASS = '<div class="course-expiration-message">'
TEST_WELCOME_MESSAGE = '<h2>Welcome!</h2>'
TEST_UPDATE_MESSAGE = '<h2>Test Update!</h2>'
TEST_COURSE_UPDATES_TOOL = '/course/updates">'
TEST_COURSE_HOME_MESSAGE = 'course-message'
TEST_COURSE_HOME_MESSAGE_ANONYMOUS = '/login'
TEST_COURSE_HOME_MESSAGE_UNENROLLED = 'Enroll now'
TEST_COURSE_HOME_MESSAGE_PRE_START = 'Course starts in'
# CSS hooks for the course-goal widgets.
TEST_COURSE_GOAL_OPTIONS = 'goal-options-container'
TEST_COURSE_GOAL_UPDATE_FIELD = 'section-goals'
TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN = 'section-goals hidden'
COURSE_GOAL_DISMISS_OPTION = 'unsure'
# A date safely before any course start used in these tests.
THREE_YEARS_AGO = now() - timedelta(days=(365 * 3))
# Waffle tables are excluded from SQL query-count assertions.
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
def course_home_url(course):
    """
    Returns the URL for the course's home page.

    Arguments:
        course (CourseBlock): The course being tested.
    """
    course_key_string = str(course.id)
    return course_home_url_from_string(course_key_string)
def course_home_url_from_string(course_key_string):
    """
    Return the URL of the course home page for the given course key.

    Arguments:
        course_key_string (String): The course key as string.
    """
    url_kwargs = {'course_id': course_key_string}
    return reverse('openedx.course_experience.course_home', kwargs=url_kwargs)
class CourseHomePageTestCase(BaseCourseUpdatesTestCase):
    """
    Base class for testing the course home page.

    Creates two split-modulestore courses: ``cls.course`` (open enrollment,
    with one chapter containing two sequentials/verticals) and
    ``cls.private_course`` (invitation only), plus a course-scoped staff user.
    """
    @classmethod
    def setUpClass(cls):
        """
        Set up a course to be used for testing.
        """
        # pylint: disable=super-method-not-called
        # setUpClassAndTestData wraps the base class setup so course creation
        # happens inside the test-data transaction context.
        with cls.setUpClassAndTestData():
            with cls.store.default_store(ModuleStoreEnum.Type.split):
                cls.course = CourseFactory.create(
                    org='edX',
                    number='test',
                    display_name='Test Course',
                    start=now() - timedelta(days=30),
                    metadata={"invitation_only": False}
                )
                # Same org/number but a different display name, so the run
                # (and therefore the course key) differs from cls.course.
                cls.private_course = CourseFactory.create(
                    org='edX',
                    number='test',
                    display_name='Test Private Course',
                    start=now() - timedelta(days=30),
                    metadata={"invitation_only": True}
                )
                # Batch the outline writes to keep mongo round-trips down.
                with cls.store.bulk_operations(cls.course.id):
                    chapter = ItemFactory.create(
                        category='chapter',
                        parent_location=cls.course.location,
                        display_name=TEST_CHAPTER_NAME,
                    )
                    section = ItemFactory.create(category='sequential', parent_location=chapter.location)
                    section2 = ItemFactory.create(category='sequential', parent_location=chapter.location)
                    ItemFactory.create(category='vertical', parent_location=section.location)
                    ItemFactory.create(category='vertical', parent_location=section2.location)
    @classmethod
    def setUpTestData(cls):
        """Set up and enroll our fake user in the course."""
        super().setUpTestData()
        # Staff user scoped to cls.course; shared by tests in subclasses.
        cls.staff_user = StaffFactory(course_key=cls.course.id, password=TEST_PASSWORD)
    def create_future_course(self, specific_date=None):
        """
        Creates and returns a course in the future.

        Arguments:
            specific_date (datetime|None): explicit start date; defaults to 30
                days from now when omitted.
        """
        return CourseFactory.create(
            display_name='Test Future Course',
            start=specific_date if specific_date else now() + timedelta(days=30),
        )
class TestCourseHomePage(CourseHomePageTestCase): # lint-amnesty, pylint: disable=missing-class-docstring
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    def test_welcome_message_when_unified(self):
        """With the unified course tab, the welcome message shows on course home."""
        # Create a welcome message
        self.create_course_update(TEST_WELCOME_MESSAGE)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_WELCOME_MESSAGE, status_code=200)
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    @override_waffle_flag(DISABLE_UNIFIED_COURSE_TAB_FLAG, active=True)
    def test_welcome_message_when_not_unified(self):
        """With the unified course tab disabled, no welcome message on course home."""
        # Create a welcome message
        self.create_course_update(TEST_WELCOME_MESSAGE)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertNotContains(response, TEST_WELCOME_MESSAGE, status_code=200)
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    def test_updates_tool_visibility(self):
        """
        Verify that the updates course tool is visible only when the course
        has one or more updates.
        """
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertNotContains(response, TEST_COURSE_UPDATES_TOOL, status_code=200)
        self.create_course_update(TEST_UPDATE_MESSAGE)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_COURSE_UPDATES_TOOL, status_code=200)
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    def test_queries(self):
        """
        Verify that the view's query count doesn't regress.
        """
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2018, 1, 1, tzinfo=UTC))
        # Pre-fetch the view to populate any caches
        # NOTE(review): this only builds the URL — no self.client.get() is
        # issued, so nothing is actually pre-fetched; looks like a GET was
        # intended here. Fixing it may change the pinned query count below,
        # so confirm before touching.
        course_home_url(self.course)
        # Fetch the view and verify the query counts
        # TODO: decrease query count as part of REVO-28
        with self.assertNumQueries(66, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
            with check_mongo_calls(3):
                url = course_home_url(self.course)
                self.client.get(url)
    @mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    def test_start_date_handling(self):
        """
        Verify that the course home page handles start dates correctly.
        """
        # The course home page should 404 for a course starting in the future
        future_course = self.create_future_course(datetime(2030, 1, 1, tzinfo=UTC))
        url = course_home_url(future_course)
        response = self.client.get(url)
        self.assertRedirects(response, '/dashboard?notlive=Jan+01%2C+2030')
        # With the Waffle flag enabled, the course should be visible
        with override_flag(COURSE_PRE_START_ACCESS_FLAG.name, True):
            url = course_home_url(future_course)
            response = self.client.get(url)
            assert response.status_code == 200
    def test_legacy_redirect(self):
        """
        Verify that the legacy course home page redirects to the MFE correctly.
        """
        # '$' in the query string must survive the redirect URL-encoded.
        url = course_home_url(self.course) + '?foo=b$r'
        response = self.client.get(url)
        assert response.status_code == 302
        assert response.get('Location') == 'http://learning-mfe/course/course-v1:edX+test+Test_Course/home?foo=b%24r'
@ddt.ddt
@override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
class TestCourseHomePageAccess(CourseHomePageTestCase):
    """
    Test access to the course home page.

    Covers anonymous/unenrolled/enrolled/staff access, course visibility
    flags, course expiration banners, course messaging, and course goals.
    """
    def setUp(self):
        super().setUp()
        self.client.logout()  # start with least access and add access back in the various test cases
        # Make this a verified course so that an upgrade message might be shown
        add_course_mode(self.course, mode_slug=CourseMode.AUDIT)
        add_course_mode(self.course)
        # Add a welcome message
        self.create_course_update(TEST_WELCOME_MESSAGE)
    # Each row: [enable_unenrolled_access, course_visibility, user_type,
    #            expected_enroll_message, expected_course_outline]
    # (order matches the test_home_page signature below).
    @ddt.data(
        [False, COURSE_VISIBILITY_PRIVATE, CourseUserType.ANONYMOUS, True, False],
        [False, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.ANONYMOUS, True, False],
        [False, COURSE_VISIBILITY_PUBLIC, CourseUserType.ANONYMOUS, True, False],
        [True, COURSE_VISIBILITY_PRIVATE, CourseUserType.ANONYMOUS, True, False],
        [True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.ANONYMOUS, True, True],
        [True, COURSE_VISIBILITY_PUBLIC, CourseUserType.ANONYMOUS, True, True],
        [False, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED, True, False],
        [False, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.UNENROLLED, True, False],
        [False, COURSE_VISIBILITY_PUBLIC, CourseUserType.UNENROLLED, True, False],
        [True, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED, True, False],
        [True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.UNENROLLED, True, True],
        [True, COURSE_VISIBILITY_PUBLIC, CourseUserType.UNENROLLED, True, True],
        [False, COURSE_VISIBILITY_PRIVATE, CourseUserType.ENROLLED, False, True],
        [True, COURSE_VISIBILITY_PRIVATE, CourseUserType.ENROLLED, False, True],
        [True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.ENROLLED, False, True],
        [True, COURSE_VISIBILITY_PUBLIC, CourseUserType.ENROLLED, False, True],
        [False, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED_STAFF, True, True],
        [True, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED_STAFF, True, True],
        [True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.UNENROLLED_STAFF, True, True],
        [True, COURSE_VISIBILITY_PUBLIC, CourseUserType.UNENROLLED_STAFF, True, True],
        [False, COURSE_VISIBILITY_PRIVATE, CourseUserType.GLOBAL_STAFF, True, True],
        [True, COURSE_VISIBILITY_PRIVATE, CourseUserType.GLOBAL_STAFF, True, True],
        [True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.GLOBAL_STAFF, True, True],
        [True, COURSE_VISIBILITY_PUBLIC, CourseUserType.GLOBAL_STAFF, True, True],
    )
    @ddt.unpack
    def test_home_page(
            self, enable_unenrolled_access, course_visibility, user_type,
            expected_enroll_message, expected_course_outline,
    ):
        """Verify which page elements each user type sees for each visibility setting."""
        self.create_user_for_course(self.course, user_type)
        # Render the course home page
        with mock.patch('xmodule.course_module.CourseBlock.course_visibility', course_visibility):
            # Test access with anonymous flag and course visibility
            with override_waffle_flag(COURSE_ENABLE_UNENROLLED_ACCESS_FLAG, enable_unenrolled_access):
                url = course_home_url(self.course)
                response = self.client.get(url)
                private_url = course_home_url(self.private_course)
                private_response = self.client.get(private_url)
        is_anonymous = user_type is CourseUserType.ANONYMOUS
        is_enrolled = user_type is CourseUserType.ENROLLED
        is_enrolled_or_staff = is_enrolled or user_type in (
            CourseUserType.UNENROLLED_STAFF, CourseUserType.GLOBAL_STAFF
        )
        # Verify that the course tools and dates are shown for enrolled users & staff
        self.assertContains(response, TEST_COURSE_TOOLS, count=(1 if is_enrolled_or_staff else 0))
        self.assertContains(response, 'Learn About Verified Certificate', count=(1 if is_enrolled else 0))
        # Verify that start button, course sock, and welcome message
        # are only shown to enrolled users or staff.
        self.assertContains(response, 'Start Course', count=(1 if is_enrolled_or_staff else 0))
        self.assertContains(response, TEST_WELCOME_MESSAGE, count=(1 if is_enrolled_or_staff else 0))
        # Verify the outline is shown to enrolled users, unenrolled_staff and anonymous users if allowed
        self.assertContains(response, TEST_CHAPTER_NAME, count=(1 if expected_course_outline else 0))
        # Verify the message shown to the user
        if not enable_unenrolled_access or course_visibility != COURSE_VISIBILITY_PUBLIC:
            self.assertContains(
                response, 'To see course content', count=(1 if is_anonymous else 0)
            )
            self.assertContains(response, '<div class="user-messages"', count=(1 if expected_enroll_message else 0))
            if expected_enroll_message:
                self.assertContains(response, 'You must be enrolled in the course to see course content.')
        if enable_unenrolled_access and course_visibility == COURSE_VISIBILITY_PUBLIC:
            # Invitation-only courses keep the enrollment message even when public.
            if user_type == CourseUserType.UNENROLLED and self.private_course.invitation_only:
                if expected_enroll_message:
                    self.assertContains(private_response,
                                        'You must be enrolled in the course to see course content.')
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    @override_waffle_flag(DISABLE_UNIFIED_COURSE_TAB_FLAG, active=True)
    # Each row: [user_type, expected_message (None means no message shown)].
    @ddt.data(
        [CourseUserType.ANONYMOUS, 'To see course content'],
        [CourseUserType.ENROLLED, None],
        [CourseUserType.UNENROLLED, 'You must be enrolled in the course to see course content.'],
        [CourseUserType.UNENROLLED_STAFF, 'You must be enrolled in the course to see course content.'],
    )
    @ddt.unpack
    def test_home_page_not_unified(self, user_type, expected_message):
        """
        Verifies the course home tab when not unified.
        """
        self.create_user_for_course(self.course, user_type)
        # Render the course home page
        url = course_home_url(self.course)
        response = self.client.get(url)
        # Verify that welcome messages are never shown
        self.assertNotContains(response, TEST_WELCOME_MESSAGE)
        # Verify that the outline, start button, course sock, course tools, and welcome message
        # are only shown to enrolled users or unenrolled staff.
        is_enrolled = user_type is CourseUserType.ENROLLED
        is_unenrolled_staff = user_type is CourseUserType.UNENROLLED_STAFF
        expected_count = 1 if (is_enrolled or is_unenrolled_staff) else 0
        self.assertContains(response, TEST_CHAPTER_NAME, count=expected_count)
        self.assertContains(response, 'Start Course', count=expected_count)
        self.assertContains(response, TEST_COURSE_TOOLS, count=expected_count)
        self.assertContains(response, 'Learn About Verified Certificate', count=(1 if is_enrolled else 0))
        # Verify that the expected message is shown to the user
        self.assertContains(response, '<div class="user-messages"', count=1 if expected_message else 0)
        if expected_message:
            self.assertContains(response, expected_message)
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    def test_sign_in_button(self):
        """
        Verify that the sign in button will return to this page.
        """
        url = course_home_url(self.course)
        response = self.client.get(url)
        # The login link should carry the current URL as the ?next target.
        self.assertContains(response, f'/login?next={quote_plus(url)}')
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    def test_non_live_course(self):
        """
        Ensure that a user accessing a non-live course sees a redirect to
        the student dashboard, not a 404.
        """
        future_course = self.create_future_course()
        self.create_user_for_course(future_course, CourseUserType.ENROLLED)
        url = course_home_url(future_course)
        response = self.client.get(url)
        start_date = strftime_localized(future_course.start, 'SHORT_DATE')
        expected_params = QueryDict(mutable=True)
        expected_params['notlive'] = start_date
        expected_url = '{url}?{params}'.format(
            url=reverse('dashboard'),
            params=expected_params.urlencode()
        )
        self.assertRedirects(response, expected_url)
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    def test_course_does_not_expire_for_verified_user(self):
        """
        There are a number of different roles/users that should not lose access after the expiration date.
        Ensure that users who should not lose access get a 200 (ok) response
        when attempting to visit the course after their would be expiration date.
        """
        course = CourseFactory.create(start=THREE_YEARS_AGO)
        url = course_home_url(course)
        user = UserFactory.create(password=self.TEST_PASSWORD)
        # NOTE(review): enrolls in self.course.id, not the freshly created
        # course.id whose URL is fetched below — looks unintentional; verify.
        CourseEnrollment.enroll(user, self.course.id, mode=CourseMode.VERIFIED)
        Schedule.objects.update(start_date=THREE_YEARS_AGO)
        # ensure that the user who has indefinite access
        self.client.login(username=user.username, password=self.TEST_PASSWORD)
        response = self.client.get(url)
        assert response.status_code == 200, 'Should not expire access for user'
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    @ddt.data(
        InstructorFactory,
        StaffFactory,
        BetaTesterFactory,
        OrgStaffFactory,
        OrgInstructorFactory,
    )
    def test_course_does_not_expire_for_course_staff(self, role_factory):
        """
        There are a number of different roles/users that should not lose access after the expiration date.
        Ensure that users who should not lose access get a 200 (ok) response
        when attempting to visit the course after their would be expiration date.
        """
        course = CourseFactory.create(start=THREE_YEARS_AGO)
        url = course_home_url(course)
        # The role is scoped to the new course; access comes from the role.
        user = role_factory.create(password=self.TEST_PASSWORD, course_key=course.id)
        # NOTE(review): enrollment targets self.course.id, not course.id — verify.
        CourseEnrollment.enroll(user, self.course.id, mode=CourseMode.AUDIT)
        Schedule.objects.update(start_date=THREE_YEARS_AGO)
        # ensure that the user has indefinite access
        self.client.login(username=user.username, password=self.TEST_PASSWORD)
        response = self.client.get(url)
        assert response.status_code == 200, 'Should not expire access for user'
    @ddt.data(
        FORUM_ROLE_COMMUNITY_TA,
        FORUM_ROLE_GROUP_MODERATOR,
        FORUM_ROLE_MODERATOR,
        FORUM_ROLE_ADMINISTRATOR
    )
    def test_course_does_not_expire_for_user_with_course_role(self, role_name):
        """
        Test that users with the above roles for a course do not lose access
        """
        course = CourseFactory.create(start=THREE_YEARS_AGO)
        url = course_home_url(course)
        user = UserFactory.create()
        role = RoleFactory(name=role_name, course_id=course.id)
        role.users.add(user)
        # ensure the user has indefinite access
        self.client.login(username=user.username, password=self.TEST_PASSWORD)
        response = self.client.get(url)
        assert response.status_code == 200, 'Should not expire access for user'
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    @ddt.data(
        GlobalStaffFactory,
    )
    def test_course_does_not_expire_for_global_users(self, role_factory):
        """
        There are a number of different roles/users that should not lose access after the expiration date.
        Ensure that users who should not lose access get a 200 (ok) response
        when attempting to visit the course after their would be expiration date.
        """
        course = CourseFactory.create(start=THREE_YEARS_AGO)
        url = course_home_url(course)
        user = role_factory.create(password=self.TEST_PASSWORD)
        # NOTE(review): enrollment targets self.course.id, not course.id — verify.
        CourseEnrollment.enroll(user, self.course.id, mode=CourseMode.AUDIT)
        Schedule.objects.update(start_date=THREE_YEARS_AGO)
        # ensure that the user who has indefinite access
        self.client.login(username=user.username, password=self.TEST_PASSWORD)
        response = self.client.get(url)
        assert response.status_code == 200, 'Should not expire access for user'
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    def test_expired_course(self):
        """
        Ensure that a user accessing an expired course sees a redirect to
        the student dashboard, not a 404.
        """
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))
        course = CourseFactory.create(start=THREE_YEARS_AGO)
        url = course_home_url(course)
        for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:
            CourseModeFactory.create(course_id=course.id, mode_slug=mode)
        # assert that an if an expired audit user tries to access the course they are redirected to the dashboard
        audit_user = UserFactory(password=self.TEST_PASSWORD)
        self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)
        audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)
        # Backdate the enrollment so it falls outside the access window.
        audit_enrollment.created = THREE_YEARS_AGO + timedelta(days=1)
        audit_enrollment.save()
        response = self.client.get(url)
        # Expiration = start + 4-week minimum duration + 1 day (per the
        # backdated enrollment above) — TODO confirm against the access logic.
        expiration_date = strftime_localized(course.start + timedelta(weeks=4) + timedelta(days=1), 'SHORT_DATE')
        expected_params = QueryDict(mutable=True)
        course_name = CourseOverview.get_from_id(course.id).display_name_with_default
        expected_params['access_response_error'] = 'Access to {run} expired on {expiration_date}'.format(
            run=course_name,
            expiration_date=expiration_date
        )
        expected_url = '{url}?{params}'.format(
            url=reverse('dashboard'),
            params=expected_params.urlencode()
        )
        self.assertRedirects(response, expected_url)
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    def test_expiration_banner_with_expired_upgrade_deadline(self):
        """
        Ensure that a user accessing a course with an expired upgrade deadline
        will still see the course expiration banner without the upgrade related text.
        """
        past = datetime(2010, 1, 1, tzinfo=UTC)
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=past)
        course = CourseFactory.create(start=now() - timedelta(days=10))
        CourseModeFactory.create(course_id=course.id, mode_slug=CourseMode.AUDIT)
        CourseModeFactory.create(course_id=course.id, mode_slug=CourseMode.VERIFIED, expiration_datetime=past)
        user = UserFactory(password=self.TEST_PASSWORD)
        self.client.login(username=user.username, password=self.TEST_PASSWORD)
        CourseEnrollment.enroll(user, course.id, mode=CourseMode.AUDIT)
        url = course_home_url(course)
        response = self.client.get(url)
        bannerText = get_expiration_banner_text(user, course)
        self.assertContains(response, bannerText, html=True)
        self.assertContains(response, TEST_BANNER_CLASS)
    def test_audit_only_not_expired(self):
        """
        Verify that enrolled users are NOT shown the course expiration banner and can
        access the course home page if course audit only
        """
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))
        audit_only_course = CourseFactory.create()
        self.create_user_for_course(audit_only_course, CourseUserType.ENROLLED)
        response = self.client.get(course_home_url(audit_only_course))
        assert response.status_code == 200
        self.assertContains(response, TEST_COURSE_TOOLS)
        self.assertNotContains(response, TEST_BANNER_CLASS)
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    def test_expired_course_in_holdback(self):
        """
        Ensure that a user accessing an expired course that is in the holdback
        does not get redirected to the student dashboard, not a 404.
        """
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))
        course = CourseFactory.create(start=THREE_YEARS_AGO)
        url = course_home_url(course)
        for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:
            CourseModeFactory.create(course_id=course.id, mode_slug=mode)
        # assert that an if an expired audit user in the holdback tries to access the course
        # they are not redirected to the dashboard
        audit_user = UserFactory(password=self.TEST_PASSWORD)
        self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)
        audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)
        Schedule.objects.update(start_date=THREE_YEARS_AGO)
        # Excluding the enrollment from FBE keeps full access despite expiry.
        FBEEnrollmentExclusion.objects.create(
            enrollment=audit_enrollment
        )
        response = self.client.get(url)
        assert response.status_code == 200
    @mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
    @mock.patch("common.djangoapps.util.date_utils.strftime_localized")
    def test_non_live_course_other_language(self, mock_strftime_localized):
        """
        Ensure that a user accessing a non-live course sees a redirect to
        the student dashboard, not a 404, even if the localized date is unicode
        """
        future_course = self.create_future_course()
        self.create_user_for_course(future_course, CourseUserType.ENROLLED)
        fake_unicode_start_time = "üñîçø∂é_ßtå®t_tîµé"
        mock_strftime_localized.return_value = fake_unicode_start_time
        url = course_home_url(future_course)
        response = self.client.get(url)
        expected_params = QueryDict(mutable=True)
        expected_params['notlive'] = fake_unicode_start_time
        expected_url = '{url}?{params}'.format(
            url=reverse('dashboard'),
            params=expected_params.urlencode()
        )
        self.assertRedirects(response, expected_url)
    def test_nonexistent_course(self):
        """
        Ensure a non-existent course results in a 404.
        """
        self.create_user_for_course(self.course, CourseUserType.ANONYMOUS)
        url = course_home_url_from_string('not/a/course')
        response = self.client.get(url)
        assert response.status_code == 404
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    @override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
    @override_settings(PLATFORM_NAME="edX")
    def test_masters_course_message(self):
        """Verify messaging for unenrolled users on courses with a Master's track."""
        enroll_button_html = "<button class=\"enroll-btn btn-link\">Enroll now</button>"
        # Verify that unenrolled users visiting a course with a Master's track
        # that is not the only track are shown an enroll call to action message
        add_course_mode(self.course, CourseMode.MASTERS, 'Master\'s Mode', upgrade_deadline_expired=False)
        remove_course_mode(self.course, CourseMode.AUDIT)
        self.create_user_for_course(self.course, CourseUserType.UNENROLLED)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
        self.assertContains(response, enroll_button_html)
        # Verify that unenrolled users visiting a course that contains only a Master's track
        # are not shown an enroll call to action message
        remove_course_mode(self.course, CourseMode.VERIFIED)
        response = self.client.get(url)
        expected_message = ('You must be enrolled in the course to see course content. '
                            'Please contact your degree administrator or edX Support if you have questions.')
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
        self.assertContains(response, expected_message)
        self.assertNotContains(response, enroll_button_html)
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    @override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
    def test_course_messaging(self):
        """
        Ensure that the following four use cases work as expected
        1) Anonymous users are shown a course message linking them to the login page
        2) Unenrolled users are shown a course message allowing them to enroll
        3) Enrolled users who show up on the course page after the course has begun
        are not shown a course message.
        4) Enrolled users who show up on the course page after the course has begun will
        see the course expiration banner if course duration limits are on for the course.
        5) Enrolled users who show up on the course page before the course begins
        are shown a message explaining when the course starts as well as a call to
        action button that allows them to add a calendar event.
        """
        # Verify that anonymous users are shown a login link in the course message
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE_ANONYMOUS)
        # Verify that unenrolled users are shown an enroll call to action message
        user = self.create_user_for_course(self.course, CourseUserType.UNENROLLED)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
        # Verify that enrolled users are not shown any state warning message when enrolled and course has begun.
        CourseEnrollment.enroll(user, self.course.id)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_ANONYMOUS)
        self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
        self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_PRE_START)
        # Verify that enrolled users are shown the course expiration banner if content gating is enabled
        # We use .save() explicitly here (rather than .objects.create) in order to force the
        # cache to refresh.
        config = CourseDurationLimitConfig(
            course=CourseOverview.get_from_id(self.course.id),
            enabled=True,
            enabled_as_of=datetime(2018, 1, 1, tzinfo=UTC)
        )
        config.save()
        url = course_home_url(self.course)
        response = self.client.get(url)
        bannerText = get_expiration_banner_text(user, self.course)
        self.assertContains(response, bannerText, html=True)
        # Verify that enrolled users are not shown the course expiration banner if content gating is disabled
        config.enabled = False
        config.save()
        url = course_home_url(self.course)
        response = self.client.get(url)
        bannerText = get_expiration_banner_text(user, self.course)
        self.assertNotContains(response, bannerText, html=True)
        # Verify that enrolled users are shown 'days until start' message before start date
        future_course = self.create_future_course()
        CourseEnrollment.enroll(user, future_course.id)
        url = course_home_url(future_course)
        response = self.client.get(url)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
        self.assertContains(response, TEST_COURSE_HOME_MESSAGE_PRE_START)
    @override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
    def test_course_messaging_for_staff(self):
        """
        Staff users will not see the expiration banner when course duration limits
        are on for the course.
        """
        config = CourseDurationLimitConfig(
            course=CourseOverview.get_from_id(self.course.id),
            enabled=True,
            enabled_as_of=datetime(2018, 1, 1, tzinfo=UTC)
        )
        config.save()
        url = course_home_url(self.course)
        CourseEnrollment.enroll(self.staff_user, self.course.id)
        response = self.client.get(url)
        bannerText = get_expiration_banner_text(self.staff_user, self.course)
        self.assertNotContains(response, bannerText, html=True)
    @override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
    @override_waffle_flag(ENABLE_COURSE_GOALS, active=True)
    def test_course_goals(self):
        """
        Ensure that the following five use cases work as expected.
        1) Unenrolled users are not shown the set course goal message.
        2) Enrolled users are shown the set course goal message if they have not yet set a course goal.
        3) Enrolled users are not shown the set course goal message if they have set a course goal.
        4) Enrolled and verified users are not shown the set course goal message.
        5) Enrolled users are not shown the set course goal message in a course that cannot be verified.
        """
        # Create a course with a verified track.
        verifiable_course = CourseFactory.create()
        add_course_mode(verifiable_course, upgrade_deadline_expired=False)
        # Verify that unenrolled users are not shown the set course goal message.
        user = self.create_user_for_course(verifiable_course, CourseUserType.UNENROLLED)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
        # Verify that enrolled users are shown the set course goal message in a verified course.
        CourseEnrollment.enroll(user, verifiable_course.id)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertContains(response, TEST_COURSE_GOAL_OPTIONS)
        # Verify that enrolled users that have set a course goal are not shown the set course goal message.
        add_course_goal_deprecated(user, verifiable_course.id, COURSE_GOAL_DISMISS_OPTION)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
        # Verify that enrolled and verified users are not shown the set course goal message.
        get_course_goal(user, verifiable_course.id).delete()
        CourseEnrollment.enroll(user, verifiable_course.id, CourseMode.VERIFIED)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
        # Verify that enrolled users are not shown the set course goal message in an audit only course.
        audit_only_course = CourseFactory.create()
        CourseEnrollment.enroll(user, audit_only_course.id)
        response = self.client.get(course_home_url(audit_only_course))
        self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
    @override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
    @override_waffle_flag(ENABLE_COURSE_GOALS, active=True)
    def test_course_goal_updates(self):
        """
        Ensure that the following five use cases work as expected.
        1) Unenrolled users are not shown the update goal selection field.
        2) Enrolled users are not shown the update goal selection field if they have not yet set a course goal.
        3) Enrolled users are shown the update goal selection field if they have set a course goal.
        4) Enrolled users in the verified track are shown the update goal selection field.
        """
        # Create a course with a verified track.
        verifiable_course = CourseFactory.create()
        add_course_mode(verifiable_course, upgrade_deadline_expired=False)
        # Verify that unenrolled users are not shown the update goal selection field.
        user = self.create_user_for_course(verifiable_course, CourseUserType.UNENROLLED)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
        # Verify that enrolled users that have not set a course goal are shown a hidden update goal selection field.
        enrollment = CourseEnrollment.enroll(user, verifiable_course.id)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
        # Verify that enrolled users that have set a course goal are shown a visible update goal selection field.
        add_course_goal_deprecated(user, verifiable_course.id, COURSE_GOAL_DISMISS_OPTION)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
        self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
        # Verify that enrolled and verified users are shown the update goal selection
        CourseEnrollment.update_enrollment(enrollment, is_active=True, mode=CourseMode.VERIFIED)
        response = self.client.get(course_home_url(verifiable_course))
        self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
        self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
@ddt.ddt
@override_waffle_flag(COURSE_HOME_USE_LEGACY_FRONTEND, active=True)
class CourseHomeFragmentViewTests(ModuleStoreTestCase):
    """
    Tests for the upgrade message displayed on the (legacy) course home page.

    All tests run against a self-paced course with an audit mode and a
    purchasable verified mode, with the SHOW_UPGRADE_MSG_ON_COURSE_HOME flag
    enabled for everyone unless a test disables it.
    """
    CREATE_USER = False  # each test logs in a factory-created user itself

    def setUp(self):
        super().setUp()
        # Route checkout through the ecommerce service so upgrade links exist.
        CommerceConfiguration.objects.create(checkout_on_ecommerce_service=True)

        # A currently-running self-paced course with a 30-day upgrade window.
        end = now() + timedelta(days=30)
        self.course = CourseFactory(
            start=now() - timedelta(days=30),
            end=end,
            self_paced=True,
        )
        self.url = course_home_url(self.course)

        # Audit mode plus a $100 verified mode that expires with the course.
        CourseMode.objects.create(course_id=self.course.id, mode_slug=CourseMode.AUDIT)  # lint-amnesty, pylint: disable=no-member
        self.verified_mode = CourseMode.objects.create(
            course_id=self.course.id,  # lint-amnesty, pylint: disable=no-member
            mode_slug=CourseMode.VERIFIED,
            min_price=100,
            expiration_datetime=end,
            sku='test'
        )
        self.user = UserFactory()
        self.client.login(username=self.user.username, password=TEST_PASSWORD)

        # Enable the upgrade-message flag for everyone by default.
        self.flag, __ = Flag.objects.update_or_create(
            name=SHOW_UPGRADE_MSG_ON_COURSE_HOME.name, defaults={'everyone': True}
        )

    def assert_upgrade_message_not_displayed(self):
        """Fetch the course home page and assert the upgrade section is absent."""
        response = self.client.get(self.url)
        self.assertNotContains(response, 'section-upgrade')

    def assert_upgrade_message_displayed(self):
        """Fetch the course home page and assert the upgrade section, the
        ecommerce checkout link, and the price are all rendered."""
        response = self.client.get(self.url)
        self.assertContains(response, 'section-upgrade')
        url = EcommerceService().get_checkout_page_url(self.verified_mode.sku)
        self.assertContains(response, '<a id="green_upgrade" class="btn-brand btn-upgrade"')
        self.assertContains(response, url)
        self.assertContains(
            response,
            f"Upgrade (<span class='price'>${self.verified_mode.min_price}</span>)",
        )

    def test_no_upgrade_message_if_logged_out(self):
        """Anonymous users never see the upgrade message."""
        self.client.logout()
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_not_enrolled(self):
        """A logged-in but unenrolled user does not see the upgrade message."""
        assert len(CourseEnrollment.enrollments_for_user(self.user)) == 0
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_verified_track(self):
        """Already-verified learners have nothing to upgrade to."""
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.VERIFIED)  # lint-amnesty, pylint: disable=no-member
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_upgrade_deadline_passed(self):
        """No upgrade message once the verified mode has expired."""
        self.verified_mode.expiration_datetime = now() - timedelta(days=20)
        self.verified_mode.save()
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_flag_disabled(self):
        """Disabling the waffle flag hides the message even for audit learners."""
        self.flag.everyone = False
        self.flag.save()
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)  # lint-amnesty, pylint: disable=no-member
        self.assert_upgrade_message_not_displayed()

    def test_display_upgrade_message_if_audit_and_deadline_not_passed(self):
        """Audit learners inside the upgrade window see the upgrade message."""
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)  # lint-amnesty, pylint: disable=no-member
        self.assert_upgrade_message_displayed()

    @mock.patch(
        'openedx.features.course_experience.views.course_home.format_strikeout_price',
        mock.Mock(return_value=(HTML("<span>DISCOUNT_PRICE</span>"), True))
    )
    def test_upgrade_message_discount(self):
        """A discounted price returned by format_strikeout_price is rendered verbatim."""
        # pylint: disable=no-member
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)

        with override_waffle_flag(SHOW_UPGRADE_MSG_ON_COURSE_HOME, True):
            response = self.client.get(self.url)

        self.assertContains(response, "<span>DISCOUNT_PRICE</span>")
|
eduNEXT/edx-platform
|
openedx/features/course_experience/tests/views/test_course_home.py
|
Python
|
agpl-3.0
| 43,964
|
[
"VisIt"
] |
2de007671331509bc6395f3ce4617ec950cb9ecbd6bb5402807f13a8a3afadba
|
# Trains and applies a single-hidden-layer artificial neural network (ANN)
# with backpropagation, then writes the learned weights and the predictions.
# NOTE(review): the original header said "KNN algorithm with default K = 5",
# which does not match this file -- it clearly trains an ANN.
# This script expects a directory as argument.
# Given directory should have a file called train_set.csv and
# another file called test_set.csv
import helpers.files as files
import helpers.normalizers as normalizers
import helpers.ann as ann
from config.constants import *
from string import replace
import csv
import numpy as np
import math
import random
import sys
import os
import time

# --- argument and input-file validation (Python 2 print statements) ---------
if len(sys.argv) < 2:
    print ERROR+""" Please provide the directory where I can
find test and train partitions (they should be called "test_set.csv" and
"train_set".csv). Optionally, you can also set config options in config/
constants.py\n"""
    sys.exit()

dir_name = sys.argv[1].lstrip('.').rstrip('/').strip(" ")

if not os.path.isfile(dir_name+'/train_set.csv'):
    print ERROR+" File "+dir_name+"/train_set.csv not found.\n"
    sys.exit()

if not os.path.isfile(dir_name+'/test_set.csv'):
    print ERROR+" File "+dir_name+"/test_set.csv not found.\n"
    sys.exit()

train_set_file = dir_name+'/train_set.csv'
train_set = files.load_into_matrix(train_set_file,skip_first=False)
# we won't load the targets because they're what we're trying to predict
prediction_set = files.load_into_matrix(dir_name+'/test_set.csv',skip_first=False,num_attributes=NUM_ATTRS,load_targets=False,num_targets=NUM_TARGETS)

# some attributes may be ignored if user has set config option EXCLUDE_ATTRS
attributes_to_use = filter(lambda x: False if x in EXCLUDE_ATTRS else True,np.arange(NUM_ATTRS))

# --- training the network ---------------------------------------------------
# this is where the weights (between the input and the hidden layers) are kept
wij = ann.init_input_weights(len(attributes_to_use),NUM_NEURONS_HIDDEN_LAYER,NUM_DIGITS)
# these are the weights between the hidden layer and the output neuron (one per neuron in the hidden layer)
wj = ann.init_output_weights(NUM_NEURONS_HIDDEN_LAYER,NUM_DIGITS)

# i need as many input nodes as there are attributes so i'll just copy that
# note that this list may not be continuous in case an attribute is being ignored
x_values = list()

# and as many yi's as there are hidden layer neurons
# (range() returns plain lists here -- Python 2 semantics)
y_results = range(NUM_NEURONS_HIDDEN_LAYER)
y_errors = range(NUM_NEURONS_HIDDEN_LAYER)
y_network = None

for epoch in range(NUM_EPOCHS):
    for row in train_set:
        ######################
        # forward-feeding
        ######################
        x_values = ann.extract_attributes(row,attributes_to_use,NUM_DIGITS)
        for idx,y in enumerate(y_results):
            # weights used for current row, for the neuron whose index is idx
            weights_for_this_neuron = ann.extract_weights_for_neuron(wij,idx,NUM_DIGITS)
            # y is the output for this neuron
            y = ann.intermediate_output(x_values,weights_for_this_neuron,NUM_DIGITS)
            y_results[idx] = y
        y_network = ann.network_output(y_results,wj,NUM_DIGITS)
        y_actual = ann.get_target_for_row(row,PREDICT_TARGET)
        ####################
        # back-propagation
        ####################
        network_error = y_actual - y_network
        # propagating the error to the hidden layer neurones
        for idx,y in enumerate(y_results):
            y_errors[idx] = network_error * wj[idx]
        # propagating the error to the weights between the input layer and the hidden layer
        for idx,y in enumerate(y_results):
            ann.update_incoming_weights_for_neuron(wij,x_values,idx, y_errors[idx],y_results[idx],LEARNING_RATE)
        # propagating the error to the weights between the hidden layer and the output node
        for idx,y in enumerate(y_results):
            ann.update_outgoing_weights_for_neuron(wj,idx,network_error,y_results,LEARNING_RATE)

# the gran finale! setting the predictions using the learned weights
for idx,row in enumerate(prediction_set):
    prediction_set[idx][PREDICT_TARGET] = ann.run_test_instance(wij,wj,row,PREDICT_TARGET,NUM_NEURONS_HIDDEN_LAYER,NUM_DIGITS)

predictions_dir_name = files.get_predictions_dir_from_partitions_dir(dir_name)

files.save_list_of_lists_as_csv(predictions_dir_name+"_ann/trained_weights_for_input_nodes.csv",wij)
files.save_list_as_csv(predictions_dir_name+"_ann/trained_weights_for_output_node.csv",wj)

# this is useful in case we want to know how we got these variables
# NOTE(review): these three lines build one flat tuple of alternating labels
# and values via tuple concatenation; save_matrix_as_csv is presumably able
# to serialize that -- confirm against helpers.files.
constants = "NUM_NEURONS_HIDDEN_LAYER =>",NUM_NEURONS_HIDDEN_LAYER
constants += "LEARNING_RATE =>",LEARNING_RATE
constants += "NUM_EPOCHS =>",NUM_EPOCHS

files.save_matrix_as_csv(predictions_dir_name+"_ann/prediction_set.csv",prediction_set)
files.save_matrix_as_csv(predictions_dir_name+"_ann/configs_used.txt",constants)

print "\n "+SUCCESS+""" Artificial Neural Network has been successfully trained and executed!
Look at \033[36m"""+predictions_dir_name+"""_ann/trained_weights_for_input_nodes.csv\033[0m
and \033[36m"""+predictions_dir_name+"""_ann/trained_weights_for_output_node.csv\033[0m
for the weights that were used.
In addition the set located at
\033[36m"""+predictions_dir_name+"""_ann/prediction_set.csv\033[0m contains the predictions
based upon the previously mentioned trained weights.\n"""
|
queirozfcom/ml201401
|
train_and_apply_ann.py
|
Python
|
mit
| 5,209
|
[
"NEURON"
] |
9ac5e047bfcbe40df6130947caf3f0e0e49a0c1e2488994c22b9cd66aa7eaa5c
|
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import matplotlib.pyplot as plt

# Column 0 of both files holds the x-position; columns 1-3 hold the plotted
# quantities, in the same order for both data sets.
data_anal = np.loadtxt("eof_analytical.dat")
data_ek = np.loadtxt("eof_electrokinetics.dat")

# One panel per quantity: electrokinetics (red circles) vs. analytical (blue line).
fig1 = plt.figure(figsize=(16, 4))
for column, quantity in enumerate(["density", "velocity", "shear stress xz"],
                                  start=1):
    ax = fig1.add_subplot(1, 3, column)
    ax.plot(data_ek[:, 0], data_ek[:, column], 'o',
            mfc='none', color="r", label="electrokinetics")
    ax.plot(data_anal[:, 0], data_anal[:, column], color="b", label="analytical")
    ax.set_xlabel("x-position")
    ax.set_ylabel(quantity)
    ax.legend(loc="best")

plt.show()
|
espressomd/espresso
|
doc/tutorials/electrokinetics/scripts/plot.py
|
Python
|
gpl-3.0
| 1,736
|
[
"ESPResSo"
] |
af0598b1e7fb5679a02041b22b753f07c9adcaa9520d972146d3dc34123e9e6f
|
#!/usr/bin/env python
import sys
sys.path.append(".")
from netcdf import netcdf as nc
from models.helpers import to_datetime, short
from glob import glob
from itertools import groupby
import os
from functools import partial
from datetime import datetime
import numpy as np
# Maps each output radiance filename to the source radiation ('estimated')
# file it derives from; populated by workwith() and consulted everywhere else.
rev_key = {}
# Seconds in one half-hourly image slot (30 min); presumably used to turn an
# instantaneous radiation value into energy per slot -- TODO confirm units.
TO_RAD = 30. * 60.
# Joules to megajoules.
TO_MJ = 10 ** -6
# Combined factor: radiation -> MJ per 30-min slot (assuming the above).
TO_MJRAD = TO_RAD * TO_MJ
def initialize(radiance_filename, radiation_filename, callback=lambda r: r):
    """Create a 'radiance' variable in radiance_filename mirroring the layout
    of radiation_filename's 'globalradiation' variable, filling the first time
    slot with callback applied to the copied values."""
    radiation_root, _ = nc.open(radiation_filename)
    radiation = nc.getvar(radiation_root, 'globalradiation')
    with nc.loader(radiance_filename) as target_root:
        radiance_var = nc.getvar(target_root, 'radiance', source=radiation)
        radiance_var[0, :] = callback(radiance_var[0, :])
    nc.close(radiation_root)
def radiance(radiance_files, radiance_filename):
    """Produce radiance_filename: directly from its registered source
    radiation file when rev_key has one, otherwise by temporal interpolation
    between the closest neighbours that do."""
    source_filename = rev_key.get(radiance_filename)
    if source_filename is None:
        interpolate_radiance(radiance_files, radiance_filename)
    else:
        initialize(radiance_filename, source_filename,
                   lambda r: r * TO_MJRAD)
def search_closest(list_items, filename, step):
    """Starting at filename, walk through list_items in the direction given by
    step and return the first item registered in rev_key, or None if the walk
    leaves the list."""
    last_index = len(list_items) - 1
    inside = lambda i: 0 <= i <= last_index
    position = list_items.index(filename)
    while inside(position) and list_items[position] not in rev_key:
        position = step(position)
    return list_items[position] if inside(position) else None
def calculate_weights(for_file, files):
    """Return interpolation weights for files (the neighbours of for_file),
    each weight shrinking with the neighbour's temporal distance to
    for_file's own half-hourly slot."""
    day = short(for_file, 2, 4)
    slot = int(short(for_file, 4, 5)[1:])
    decimal_hour = slot / 2.
    minutes = 60 * (decimal_hour % 1)
    itime = datetime.strptime('%s %i:%i' % (day, int(decimal_hour), minutes),
                              '%Y.%j %H:%M')
    times = [to_datetime(f) for f in files]
    # Distances are normalized by the spacing between the two neighbours.
    span = (times[0] - times[1]).total_seconds()
    return [1 - abs((itime - t).total_seconds() / span) for t in times]
def interpolate_radiance(radiance_files, radiance_filename):
    """Estimate radiance_filename from its temporal neighbours.

    Looks for the closest file before and after radiance_filename that has a
    source radiation file registered in rev_key, then initializes the output
    from a time-weighted average of those sources (or from the single
    neighbour found).  Does nothing when no neighbour exists.
    """
    before = search_closest(radiance_files, radiance_filename, lambda s: s - 1)
    after = search_closest(radiance_files, radiance_filename, lambda s: s + 1)
    # Drop the missing neighbour(s); filter returns a list here (Python 2).
    extrems = filter(lambda x: x, [before, after])
    if extrems:
        ref_filename = max(extrems)
        files = map(lambda e: rev_key[e], extrems)
        root, is_new = nc.open(files)
        radiation = nc.getvar(root, 'globalradiation')
        if len(extrems) > 1:
            # Two neighbours: average their slots, weighted by time distance.
            radiation = np.average(radiation[:], axis=0,
                                   weights=calculate_weights(radiance_filename,
                                                             files))
        else:
            # Single neighbour: collapse its values to a scalar mean.
            radiation = radiation[:].mean()
        # The interpolated radiation is converted with the same MJ-per-slot
        # factor used for directly-sourced radiance files.
        initialize(radiance_filename, rev_key[ref_filename],
                   lambda r: radiation * TO_MJRAD)
        nc.close(root)
def generate_radiance_filename(filename):
    """Build 'products/radiance/rad.<prefix>.S<slot>.<suffix>' for a source
    radiation file, creating the output directory if needed.

    :param str filename: source radiation filename (fields parsed via short).
    :returns: the radiance output filename (the file itself is not created).
    """
    prefix = short(filename, 0, 3)
    decimalhour = lambda t: t.hour + t.minute / 60. + t.second / 3600.
    # Half-hourly slots: two slots per hour, zero-padded to two digits.
    slot = str(int(round(decimalhour(to_datetime(filename)) * 2))).zfill(2)
    suffix = short(filename, 4, 6)
    # EAFP directory creation: the original exists()-then-makedirs() pair
    # raced when several callers generated filenames concurrently.
    try:
        os.makedirs('products/radiance')
    except OSError:
        if not os.path.isdir('products/radiance'):
            raise
    output_filename = 'products/radiance/rad.%s.S%s.%s' % (
        prefix, slot, suffix)
    return output_filename
def complete(radiance_files):
    """Extend radiance_files with a filename for every missing slot (S20-S46)
    of each day present, then return the list sorted."""
    expected_slots = set(range(20, 47))
    if radiance_files:
        prefix = short(radiance_files[0], 0, 2)
        suffix = short(radiance_files[0], -2, None)
        slot_number = lambda f: int(short(f, 4)[1:])
        day_key = lambda f: short(f, 2, 4)
        # Group the (sorted copy of the) filenames by day and fill the gaps.
        for day, files_of_day in groupby(sorted(radiance_files), day_key):
            present = set(map(slot_number, list(files_of_day)))
            for missing in expected_slots - present:
                tag = '%s.S%s' % (day, str(missing).zfill(2))
                radiance_files.append('products/radiance/%s.%s.%s' % (
                    prefix, tag, suffix))
        radiance_files.sort()
    return radiance_files
def workwith(path='products/estimated/*.nc'):
    """Generate one radiance product per estimated radiation file matched by
    path, interpolating the slots that have no direct source."""
    estimated_files = glob(path)
    radiance_files = [generate_radiance_filename(f) for f in estimated_files]
    rev_key.update(dict(zip(radiance_files, estimated_files)))
    radiance_files = complete(radiance_files)
    for filename in radiance_files:
        radiance(radiance_files, filename)
# Allow running this module directly as a script.
if __name__ == '__main__':
    workwith()
|
gersolar/solar_radiation_model
|
models/utils/radiance.py
|
Python
|
mit
| 4,308
|
[
"NetCDF"
] |
5774ca206baa6bed58022a616a0fb78c4eeaeea6a049f1cf92b4a2d15b683eaf
|
"""
# Notes:
- This simulation seeks to emulate the COBAHH benchmark simulations of (Brette
et al. 2007) using the Brian2 simulator for speed benchmark comparison to
DynaSim. However, this simulation does NOT include synapses, for better
comparison to Figure 5 of (Goodman and Brette, 2008) - although it uses the
COBAHH model of (Brette et al. 2007), not CUBA.
- The time taken to simulate will be indicated in the stdout log file
'~/batchdirs/brian_benchmark_COBAHH_nosyn_4/pbsout/brian_benchmark_COBAHH_nosyn_4.out'
- Note that this code has been slightly modified from the original (Brette et
al. 2007) benchmarking code, available here on ModelDB:
https://senselab.med.yale.edu/modeldb/showModel.cshtml?model=83319
in order to work with version 2 of the Brian simulator (aka Brian2), and also
modified to change the model being benchmarked, etc.
# References:
- Brette R, Rudolph M, Carnevale T, Hines M, Beeman D, Bower JM, et al.
Simulation of networks of spiking neurons: A review of tools and strategies.
Journal of Computational Neuroscience 2007;23:349–98.
doi:10.1007/s10827-007-0038-6.
- Goodman D, Brette R. Brian: a simulator for spiking neural networks in Python.
Frontiers in Neuroinformatics 2008;2. doi:10.3389/neuro.11.005.2008.
"""
from brian2 import *

# Parameters
cells = 4
defaultclock.dt = 0.01*ms
area = 20000*umetre**2
Cm = (1*ufarad*cmetre**-2) * area
gl = (5e-5*siemens*cmetre**-2) * area
El = -60*mV
EK = -90*mV
ENa = 50*mV
g_na = (100*msiemens*cmetre**-2) * area
g_kd = (30*msiemens*cmetre**-2) * area
VT = -63*mV

# # Time constants
# taue = 5*ms
# taui = 10*ms
# # Reversal potentials
# Ee = 0*mV
# Ei = -80*mV
# we = 6*nS # excitatory synaptic weight
# wi = 67*nS # inhibitory synaptic weight

# The model: Hodgkin-Huxley conductances (COBAHH, Brette et al. 2007) without
# the synaptic terms -- this is the "nosyn" benchmark variant.
eqs = Equations('''
dv/dt = (gl*(El-v)-
         g_na*(m*m*m)*h*(v-ENa)-
         g_kd*(n*n*n*n)*(v-EK))/Cm : volt
dm/dt = alpha_m*(1-m)-beta_m*m : 1
dn/dt = alpha_n*(1-n)-beta_n*n : 1
dh/dt = alpha_h*(1-h)-beta_h*h : 1
alpha_m = 0.32*(mV**-1)*(13*mV-v+VT)/
         (exp((13*mV-v+VT)/(4*mV))-1.)/ms : Hz
beta_m = 0.28*(mV**-1)*(v-VT-40*mV)/
        (exp((v-VT-40*mV)/(5*mV))-1)/ms : Hz
alpha_h = 0.128*exp((17*mV-v+VT)/(18*mV))/ms : Hz
beta_h = 4./(1+exp((40*mV-v+VT)/(5*mV)))/ms : Hz
alpha_n = 0.032*(mV**-1)*(15*mV-v+VT)/
         (exp((15*mV-v+VT)/(5*mV))-1.)/ms : Hz
beta_n = .5*exp((10*mV-v+VT)/(40*mV))/ms : Hz
''')
# Synaptic terms kept for reference from the full COBA model:
# dv/dt = (gl*(El-v)+ge*(Ee-v)+gi*(Ei-v)-
# dge/dt = -ge*(1./taue) : siemens
# dgi/dt = -gi*(1./taui) : siemens

P = NeuronGroup(cells, model=eqs, threshold='v>-20*mV', refractory=3*ms,
                method='euler')
proportion = int(0.8*cells)
Pe = P[:proportion]  # "excitatory" subgroup (unused while synapses are disabled)
Pi = P[proportion:]  # "inhibitory" subgroup (unused while synapses are disabled)
# Ce = Synapses(Pe, P, on_pre='ge+=we')
# Ci = Synapses(Pi, P, on_pre='gi+=wi')
# Ce.connect(p=0.98)
# Ci.connect(p=0.98)

# Initialization
P.v = 'El + (randn() * 5 - 5)*mV'
# P.ge = '(randn() * 1.5 + 4) * 10.*nS'
# P.gi = '(randn() * 12 + 20) * 10.*nS'

# Record a few traces.
# BUGFIX: the template (written for thousands of cells) hard-coded
# record=[1, 10, 100], but this network only has `cells` (= 4) neurons, so
# StateMonitor would reject the out-of-range indices.  Record only the
# indices that actually exist in this network.
record_indices = [i for i in (1, 10, 100) if i < cells]
trace = StateMonitor(P, 'v', record=record_indices)
totaldata = StateMonitor(P, 'v', record=True)

run(0.5 * second, report='text')

# plot(trace.t/ms, trace[1].v/mV)
# plot(trace.t/ms, trace[10].v/mV)
# plot(trace.t/ms, trace[100].v/mV)
# xlabel('t (ms)')
# ylabel('v (mV)')
# show()

# print("Saving TC cell voltages!")
# numpy.savetxt("foo_totaldata.csv", totaldata.v/mV, delimiter=",")
|
asoplata/dynasim-benchmark-brette-2007
|
output/Brian2/brian2_benchmark_COBAHH_nosyn_0004/brian2_benchmark_COBAHH_nosyn_0004.py
|
Python
|
gpl-3.0
| 3,341
|
[
"Brian"
] |
a13a8c03479379c11eb0910a35776ac515e72a71ba0c75c8d4d4c43e6d35ce89
|
"""Interface to the CaImAn package (https://github.com/simonsfoundation/CaImAn)."""
import functools
import glob
import os
import sys
import time
import multiprocessing as mp

import numpy as np
from scipy.ndimage import percentile_filter

from caiman import components_evaluation
from caiman.utils import visualization
from caiman.source_extraction.cnmf import map_reduce, initialization, pre_processing, \
    merging, spatial, temporal, deconvolution

from .caiman_stats import df_percentile
def log(*messages):
    """ Simple logging function."""
    timestamp = "[{}]".format(time.ctime())
    # Write to the real stdout so messages survive the mute_function decorator.
    print(timestamp, *messages, file=sys.__stdout__, flush=True)
def mute_function(f):
    """ Decorator that silences any standard output produced by the function.

    Fixes over the naive implementation: the devnull handle is closed after
    the call (no file-descriptor leak per invocation), the *previous*
    sys.stdout is restored instead of sys.__stdout__ (so nested redirections
    are not clobbered), and functools.wraps preserves f's metadata.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        previous_stdout = sys.stdout
        try:
            with open(os.devnull, 'w') as devnull:
                sys.stdout = devnull
                return f(*args, **kwargs)
        finally:
            sys.stdout = previous_stdout  # restore even after exceptions
    return wrapper
@mute_function
def extract_masks(scan, mmap_scan, num_components=200, num_background_components=1,
                  merge_threshold=0.8, init_on_patches=True, init_method='greedy_roi',
                  soma_diameter=(14, 14), snmf_alpha=None, patch_size=(50, 50),
                  proportion_patch_overlap=0.2, num_components_per_patch=5,
                  num_processes=8, num_pixels_per_process=5000, fps=15):
    """ Extract masks from multi-photon scans using CNMF.

    Uses constrained non-negative matrix factorization to find spatial components (masks)
    and their fluorescence traces in a scan. Default values work well for somatic scans.

    Performed operations are:
        [Initialization on full image | Initialization on patches -> merge components] ->
        spatial update -> temporal update -> merge components -> spatial update ->
        temporal update

    :param np.array scan: 3-dimensional scan (image_height, image_width, num_frames).
    :param np.memmap mmap_scan: 2-d scan (image_height * image_width, num_frames)
    :param int num_components: An estimate of the number of spatial components in the scan
    :param int num_background_components: Number of components to model the background.
    :param int merge_threshold: Maximal temporal correlation allowed between the activity
        of overlapping components before merging them.
    :param bool init_on_patches: If True, run the initialization methods on small patches
        of the scan rather than on the whole image.
    :param string init_method: Initialization method for the components.
        'greedy_roi': Look for a gaussian-shaped patch, apply rank-1 NMF, store
            components, calculate residual scan and repeat for num_components.
        'sparse_nmf': Regularized non-negative matrix factorization (as impl. in sklearn)
    :param (float, float) soma_diameter: Estimated neuron size in y and x (pixels). Used
        in 'greedy_roi' initialization to search for neurons of this size.
    :param int snmf_alpha: Regularization parameter (alpha) for sparse NMF (if used).
    :param (float, float) patch_size: Size of the patches in y and x (pixels).
    :param float proportion_patch_overlap: Patches are sampled in a sliding window. This
        controls how much overlap is between adjacent patches (0 for none, 0.9 for 90%).
    :param int num_components_per_patch: Number of components per patch (used if
        init_on_patches=True)
    :param int num_processes: Number of processes to run in parallel. None for as many
        processes as available cores.
    :param int num_pixels_per_process: Number of pixels that a process handles each
        iteration.
    :param fps: Frame rate. Used for temporal downsampling and to remove bad components.

    :returns: Weighted masks (image_height x image_width x num_components). Inferred
        location of each component.
    :returns: Denoised fluorescence traces (num_components x num_frames).
    :returns: Masks for background components (image_height x image_width x
        num_background_components).
    :returns: Traces for background components (image_height x image_width x
        num_background_components).
    :returns: Raw fluorescence traces (num_components x num_frames). Fluorescence of each
        component in the scan minus activity from other components and background.

    ..warning:: The produced number of components is not exactly what you ask for because
        some components will be merged or deleted.
    ..warning:: Better results if scans are nonnegative.
    """
    # Get some params
    image_height, image_width, num_frames = scan.shape

    # Start processes: one worker pool shared by every parallel CNMF step below.
    log('Starting {} processes...'.format(num_processes))
    pool = mp.Pool(processes=num_processes)

    # Initialize components
    log('Initializing components...')
    if init_on_patches:
        # TODO: Redo this (per-patch initialization) in a nicer/more efficient way

        # Make sure they are integers
        patch_size = np.array(patch_size)
        half_patch_size = np.int32(np.round(patch_size / 2))
        num_components_per_patch = int(round(num_components_per_patch))
        patch_overlap = np.int32(np.round(patch_size * proportion_patch_overlap))

        # Create options dictionary (needed for run_CNMF_patches).  Entries
        # marked 'UNUSED' are required keys that this code path never reads.
        options = {'patch_params': {'ssub': 'UNUSED.', 'tsub': 'UNUSED', 'nb': num_background_components,
                                    'only_init': True, 'skip_refinement': 'UNUSED.',
                                    'remove_very_bad_comps': False},  # remove_very_bads_comps unnecesary (same as default)
                   'preprocess_params': {'check_nan': False},  # check_nan is unnecessary (same as default value)
                   'spatial_params': {'nb': num_background_components},  # nb is unnecessary, it is pased to the function and in init_params
                   'temporal_params': {'p': 0, 'method': 'UNUSED.', 'block_size': 'UNUSED.'},
                   'init_params': {'K': num_components_per_patch, 'gSig': np.array(soma_diameter)/2,
                                   'gSiz': None, 'method': init_method, 'alpha_snmf': snmf_alpha,
                                   'nb': num_background_components, 'ssub': 1, 'tsub': max(int(fps / 2), 1),
                                   'options_local_NMF': 'UNUSED.', 'normalize_init': True,
                                   'rolling_sum': True, 'rolling_length': 100, 'min_corr': 'UNUSED',
                                   'min_pnr': 'UNUSED', 'deconvolve_options_init': 'UNUSED',
                                   'ring_size_factor': 'UNUSED', 'center_psf': 'UNUSED'},
                   # gSiz, ssub, tsub, options_local_NMF, normalize_init, rolling_sum unnecessary (same as default values)
                   'merging': {'thr': 'UNUSED.'}}

        # Initialize per patch
        res = map_reduce.run_CNMF_patches(mmap_scan.filename, (image_height, image_width, num_frames),
                                          options, rf=half_patch_size, stride=patch_overlap,
                                          gnb=num_background_components, dview=pool)
        initial_A, initial_C, YrA, initial_b, initial_f, pixels_noise, _ = res

        # Merge spatially overlapping components: repeat until a full pass
        # merges nothing (merged_masks comes back empty).
        merged_masks = ['dummy']  # non-empty sentinel so the loop runs at least once
        while len(merged_masks) > 0:
            res = merging.merge_components(mmap_scan, initial_A, initial_b, initial_C,
                                           initial_f, initial_C, pixels_noise,
                                           {'p': 0, 'method': 'cvxpy'}, spatial_params='UNUSED',
                                           dview=pool, thr=merge_threshold, mx=np.Inf)
            initial_A, initial_C, num_components, merged_masks, S, bl, c1, neurons_noise, g = res

        # Delete log files (one per patch)
        log_files = glob.glob('caiman*_LOG_*')
        for log_file in log_files:
            os.remove(log_file)
    else:
        from scipy.sparse import csr_matrix
        if init_method == 'greedy_roi':
            res = _greedyROI(scan, num_components, soma_diameter, num_background_components)
            log('Refining initial components (HALS)...')
            res = initialization.hals(scan, res[0].reshape([image_height * image_width, -1], order='F'),
                                      res[1], res[2].reshape([image_height * image_width, -1], order='F'),
                                      res[3], maxIter=3)
            initial_A, initial_C, initial_b, initial_f = res
        else:
            print('Warning: Running sparse_nmf initialization on the entire field of view '
                  'takes a lot of time.')
            res = initialization.initialize_components(scan, K=num_components, nb=num_background_components,
                                                       method=init_method, alpha_snmf=snmf_alpha)
            initial_A, initial_C, initial_b, initial_f, _ = res
        # Sparse format matches what the patch-based branch produces.
        initial_A = csr_matrix(initial_A)
    log(initial_A.shape[-1], 'components found...')

    # Remove bad components (based on spatial consistency and spiking activity)
    log('Removing bad components...')
    good_indices, _ = components_evaluation.estimate_components_quality(initial_C, scan,
        initial_A, initial_C, initial_b, initial_f, final_frate=fps, r_values_min=0.7,
        fitness_min=-20, fitness_delta_min=-20, dview=pool)
    initial_A = initial_A[:, good_indices]
    initial_C = initial_C[good_indices]
    log(initial_A.shape[-1], 'components remaining...')

    # Estimate noise per pixel
    log('Calculating noise per pixel...')
    pixels_noise, _ = pre_processing.get_noise_fft_parallel(mmap_scan, num_pixels_per_process, pool)

    # Update masks
    log('Updating masks...')
    A, b, C, f = spatial.update_spatial_components(mmap_scan, initial_C, initial_f, initial_A, b_in=initial_b,
                                                   sn=pixels_noise, dims=(image_height, image_width),
                                                   method='dilate', dview=pool,
                                                   n_pixels_per_process=num_pixels_per_process,
                                                   nb=num_background_components)

    # Update traces (no impulse response modelling p=0)
    log('Updating traces...')
    res = temporal.update_temporal_components(mmap_scan, A, b, C, f, nb=num_background_components,
                                              block_size=10000, p=0, method='cvxpy', dview=pool)
    C, A, b, f, S, bl, c1, neurons_noise, g, YrA, _ = res

    # Merge components (again, until a full pass merges nothing).
    log('Merging overlapping (and temporally correlated) masks...')
    merged_masks = ['dummy']
    while len(merged_masks) > 0:
        res = merging.merge_components(mmap_scan, A, b, C, f, S, pixels_noise, {'p': 0, 'method': 'cvxpy'},
                                       'UNUSED', dview=pool, thr=merge_threshold, bl=bl, c1=c1,
                                       sn=neurons_noise, g=g)
        A, C, num_components, merged_masks, S, bl, c1, neurons_noise, g = res

    # Refine masks
    log('Refining masks...')
    A, b, C, f = spatial.update_spatial_components(mmap_scan, C, f, A, b_in=b, sn=pixels_noise,
                                                   dims=(image_height, image_width),
                                                   method='dilate', dview=pool,
                                                   n_pixels_per_process=num_pixels_per_process,
                                                   nb=num_background_components)

    # Refine traces
    log('Refining traces...')
    res = temporal.update_temporal_components(mmap_scan, A, b, C, f, nb=num_background_components,
                                              block_size=10000, p=0, method='cvxpy', dview=pool)
    C, A, b, f, S, bl, c1, neurons_noise, g, YrA, _ = res

    # Removing bad components (more stringent criteria)
    log('Removing bad components...')
    good_indices, _ = components_evaluation.estimate_components_quality(C + YrA, scan, A,
        C, b, f, final_frate=fps, r_values_min=0.8, fitness_min=-40, fitness_delta_min=-40,
        dview=pool)
    A = A.toarray()[:, good_indices]
    C = C[good_indices]
    YrA = YrA[good_indices]
    log(A.shape[-1], 'components remaining...')

    # Stop processes
    log('Done.')
    pool.close()

    # Get results
    masks = A.reshape((image_height, image_width, -1), order='F')  # h x w x num_components
    traces = C  # num_components x num_frames
    background_masks = b.reshape((image_height, image_width, -1), order='F')  # h x w x num_components
    background_traces = f  # num_background_components x num_frames
    raw_traces = C + YrA  # num_components x num_frames

    # Rescale traces to match scan range: the masks absorb the inverse factor,
    # so mask * trace products are unchanged.
    scaling_factor = np.sum(masks**2, axis=(0, 1)) / np.sum(masks, axis=(0, 1))
    traces = traces * np.expand_dims(scaling_factor, -1)
    raw_traces = raw_traces * np.expand_dims(scaling_factor, -1)
    masks = masks / scaling_factor
    background_scaling_factor = np.sum(background_masks**2, axis=(0, 1)) / np.sum(background_masks,
                                                                                  axis=(0, 1))
    background_traces = background_traces * np.expand_dims(background_scaling_factor, -1)
    background_masks = background_masks / background_scaling_factor

    return masks, traces, background_masks, background_traces, raw_traces
def _save_as_memmap(scan, base_name='caiman', chunk_size=5000):
    """Save the scan as a memory mapped file as expected by caiman.

    :param np.array scan: Scan to save shaped (image_height, image_width, num_frames)
    :param string base_name: Base file name for the scan. No underscores.
    :param int chunk_size: Write the mmap_scan chunk frames at a time. Memory efficient.

    :returns: The memory-mapped array (num_pixels x num_frames), flushed to
        disk.  (The original docstring claimed the filename was returned; the
        caiman-style filename is available as ``mmap_scan.filename``.)
    :rtype: np.memmap
    """
    # Get some params
    image_height, image_width, num_frames = scan.shape
    num_pixels = image_height * image_width

    # Build filename (caiman parses the dimensions back out of this pattern)
    filename = '{}_d1_{}_d2_{}_d3_1_order_C_frames_{}_.mmap'.format(base_name, image_height,
                                                                    image_width, num_frames)

    # Create memory mapped file, writing chunk_size frames at a time to keep
    # peak memory usage bounded.
    mmap_scan = np.memmap(filename, mode='w+', shape=(num_pixels, num_frames), dtype=np.float32)
    for i in range(0, num_frames, chunk_size):
        chunk = scan[..., i: i + chunk_size].reshape((num_pixels, -1), order='F')
        mmap_scan[:, i: i + chunk_size] = chunk
    mmap_scan.flush()

    return mmap_scan
def _greedyROI(scan, num_components=200, neuron_size=(11, 11),
               num_background_components=1):
    """ Initialize components by searching for gaussian shaped, highly active squares.

    Components are found one by one: a gaussian window is moved over every
    pixel and the highest activation is taken as the center of the next
    neuron; the component's activity is then subtracted from the residual.

    :param np.array scan: 3-dimensional scan (image_height, image_width, num_frames).
    :param int num_components: The desired number of components.
    :param (float, float) neuron_size: Expected size of the somas in pixels (y, x).
    :param int num_background_components: Number of components that model the background.

    :returns: masks (image_height x image_width x num_components), traces
        (num_components x num_frames), background_masks and background_traces
        fit on the final residual.
    """
    from scipy import ndimage

    # Get some params
    image_height, image_width, num_frames = scan.shape

    # Get the gaussian kernel
    gaussian_stddev = np.array(neuron_size) / 4  # entire neuron in four standard deviations
    gaussian_kernel = _gaussian2d(gaussian_stddev)

    # Create residual scan (scan minus background)
    residual_scan = scan - np.mean(scan, axis=(0, 1))  # image-wise brightness
    background = ndimage.gaussian_filter(np.mean(residual_scan, axis=-1), neuron_size)
    residual_scan -= np.expand_dims(background, -1)

    # Create components
    masks = np.zeros([image_height, image_width, num_components], dtype=np.float32)
    traces = np.zeros([num_components, num_frames], dtype=np.float32)
    mean_frame = np.mean(residual_scan, axis=-1)
    for i in range(num_components):
        # Get center of next component: smooth the mean frame and pick the
        # brightest remaining pixel.
        neuron_locations = ndimage.gaussian_filter(mean_frame, gaussian_stddev)
        y, x = np.unravel_index(np.argmax(neuron_locations), [image_height, image_width])

        # Compute initial trace (bit messy because of edges)
        half_kernel = np.fix(np.array(gaussian_kernel.shape) / 2).astype(np.int32)
        big_yslice = slice(max(y - half_kernel[0], 0), y + half_kernel[0] + 1)
        big_xslice = slice(max(x - half_kernel[1], 0), x + half_kernel[1] + 1)
        # Crop the kernel to match whenever the window falls off the image edge.
        kernel_yslice = slice(max(0, half_kernel[0] - y),
                              None if image_height > y + half_kernel[0] else image_height - y - half_kernel[0] - 1)
        kernel_xslice = slice(max(0, half_kernel[1] - x),
                              None if image_width > x + half_kernel[1] else image_width - x - half_kernel[1] - 1)
        cropped_kernel = gaussian_kernel[kernel_yslice, kernel_xslice]
        trace = np.average(residual_scan[big_yslice, big_xslice].reshape(-1, num_frames),
                           weights=cropped_kernel.ravel(), axis=0)

        # Get mask and trace using 1-rank NMF
        half_neuron = np.fix(np.array(neuron_size) / 2).astype(np.int32)
        yslice = slice(max(y - half_neuron[0], 0), y + half_neuron[0] + 1)
        xslice = slice(max(x - half_neuron[1], 0), x + half_neuron[1] + 1)
        mask, trace = _rank1_NMF(residual_scan[yslice, xslice], trace)

        # Update residual scan: subtract this component's activity so the next
        # iteration selects a different location.
        neuron_activity = np.expand_dims(mask, -1) * trace
        residual_scan[yslice, xslice] -= neuron_activity
        mean_frame[yslice, xslice] = np.mean(residual_scan[yslice, xslice], axis=-1)

        # Store results
        masks[yslice, xslice, i] = mask
        traces[i] = trace

    # Create background components from what the neurons did not explain.
    residual_scan += np.mean(scan, axis=(0, 1))  # add back overall brightness
    residual_scan += np.expand_dims(background, -1)  # and background
    if num_background_components == 1:
        background_masks = np.expand_dims(np.mean(residual_scan, axis=-1), axis=-1)
        background_traces = np.expand_dims(np.mean(residual_scan, axis=(0, 1)), axis=0)
    else:
        from sklearn.decomposition import NMF
        print("Warning: Fitting more than one background component uses scikit-learn's "
              "NMF and may take some time.""")
        model = NMF(num_background_components, random_state=123, verbose=True)
        flat_masks = model.fit_transform(residual_scan.reshape(-1, num_frames))
        background_masks = flat_masks.reshape([image_height, image_width, -1])
        background_traces = model.components_

    return masks, traces, background_masks, background_traces
def _gaussian2d(stddev, truncate=4):
""" Creates a 2-d gaussian kernel truncated at 4 standard deviations (8 in total).
:param (float, float) stddev: Standard deviations in y and x.
:param float truncate: Number of stddevs at each side of the kernel.
..note:: Kernel sizes will always be odd.
"""
from matplotlib import mlab
half_kernel = np.round(stddev * truncate) # kernel_size = 2 * half_kernel + 1
y, x = np.meshgrid(np.arange(-half_kernel[0], half_kernel[0] + 1),
np.arange(-half_kernel[1], half_kernel[1] + 1))
kernel = mlab.bivariate_normal(x, y, sigmay=stddev[0], sigmax=stddev[1])
return kernel
# Based on caiman.source_extraction.cnmf.initialization.finetune()
def _rank1_NMF(scan, trace, num_iterations=5):
num_frames = scan.shape[-1]
for i in range(num_iterations):
mask = np.maximum(np.dot(scan, trace), 0)
mask = mask * np.sum(mask) / np.sum(mask ** 2)
trace = np.average(scan.reshape(-1, num_frames), weights=mask.ravel(), axis=0)
return mask, trace
def deconvolve(trace, AR_order=2):
    """ Deconvolve traces using noise constrained deconvolution (Pnevmatikakis et al., 2016)

    :param np.array trace: 1-d array (num_frames) with the fluorescence trace.
    :param int AR_order: Order of the autoregressive process used to model the impulse
        response function, e.g., 0 = no modelling; 2 = model rise plus exponential decay.

    :returns: Deconvolved spike trace.
    :returns: AR coefficients (AR_order) that model the calcium response:
            c(t) = c(t-1) * AR_coeffs[0] + c(t-2) * AR_coeffs[1] + ...
    """
    # fudge_factor slightly shrinks the AR coefficients; it acts as a
    # regularization term.
    foopsi_results = deconvolution.constrained_foopsi(trace, p=AR_order,
                                                      method='cvxpy',
                                                      bas_nonneg=False,
                                                      fudge_factor=0.96)
    AR_coeffs = foopsi_results[3]
    spike_trace = foopsi_results[5]
    return spike_trace, AR_coeffs
def deconvolve_detrended(trace, scan_fps, detrend_period=600, AR_order=2):
    """Same as the the `deconvolve` method, except that the fluorescence trace is detrended
    before autoregressive modeling

    :param np.array trace: 1-d array (num_frames) with the fluorescence trace.
    :param float scan_fps: fps of the scan
    :param float detrend_period: number of seconds over which percentiles are computed
    :param int AR_order: Order of the autoregressive process used to model the impulse
        response function, e.g., 0 = no modelling; 2 = model rise plus exponential decay.

    :returns: Deconvolved spike trace.
    :returns: AR coefficients (AR_order) that model the calcium response:
            c(t) = c(t-1) * AR_coeffs[0] + c(t-2) * AR_coeffs[1] + ...
    """
    # Length of the detrending window in frames.
    detrend_window = int(round(detrend_period * scan_fps))

    # Check detrend_window > 0 *before* dividing by it: the original computed
    # len(trace) // detrend_window unconditionally, raising ZeroDivisionError
    # whenever the window rounded down to 0 (very low fps or detrend_period).
    if detrend_window > 0:
        n_chunks = len(trace) // detrend_window
        if n_chunks > 0:
            chunks_len = n_chunks * detrend_window
            trace_chunks = trace[:chunks_len].reshape(-1, detrend_window)
            # Baseline percentile estimated per chunk, then averaged.
            data_prct = df_percentile(trace_chunks, axis=1)[0].mean()
            trace = trace - percentile_filter(trace, data_prct, detrend_window)

    # fudge_factor is a regularization term
    _, _, _, AR_coeffs, _, spike_trace, _ = deconvolution.constrained_foopsi(trace,
        p=AR_order, method='cvxpy', bas_nonneg=False, fudge_factor=0.96)

    return spike_trace, AR_coeffs
def get_centroids(masks):
    """ Calculate the centroids of each mask (calls caiman's plot_contours).

    :param np.array masks: Masks (image_height x image_width x num_components)

    :returns: Centroids (num_components x 2) in y, x pixels of each component.
    """
    image_height, image_width, num_components = masks.shape
    # plot_contours expects the masks flattened in Fortran order.
    flat_masks = masks.reshape(-1, num_components, order='F')

    # plot_contours requires a background image; its contents are irrelevant
    # here, we only want the computed centers of mass.
    fake_background = np.empty([image_height, image_width])
    coordinates = visualization.plot_contours(flat_masks, fake_background)
    # Close the figure that plot_contours opens as a side effect.
    import matplotlib.pyplot as plt; plt.close()

    return np.array([coord['CoM'] for coord in coordinates])
def classify_masks(masks, soma_diameter=(12, 12)):
    """ Uses a convolutional network to predict the probability per mask of being a soma.

    :param np.array masks: Masks (image_height x image_width x num_components)
    :param (int, int) soma_diameter: Estimated soma diameter (in pixels) in y and x.

    :returns: Soma predictions (num_components).
    """
    from scipy.sparse import coo_matrix

    image_height, image_width, num_components = masks.shape
    # The CNN evaluator expects sparse masks flattened in Fortran order.
    sparse_masks = coo_matrix(masks.reshape(-1, num_components, order='F'))
    soma_radius = np.int32(np.round(np.array(soma_diameter) / 2))

    # Pretrained model shipped with the pipeline.
    model_path = '/data/pipeline/python/pipeline/data/cnn_model'
    probs, _ = components_evaluation.evaluate_components_CNN(
        sparse_masks, (image_height, image_width), soma_radius,
        model_name=model_path)

    return probs[:, 1]  # probability of the positive (soma) class
# Legacy: Used in preprocess.ExtractRaw
def demix_and_deconvolve_with_cnmf(scan, num_components=200, AR_order=2,
                                   merge_threshold=0.8, num_processes=20,
                                   num_pixels_per_process=5000, block_size=10000,
                                   num_background_components=4, init_method='greedy_roi',
                                   soma_radius=(5, 5), snmf_alpha=None,
                                   init_on_patches=False, patch_downsampling_factor=None,
                                   percentage_of_patch_overlap=None):
    """ Extract spike train activity from multi-photon scans using CNMF.

    Uses constrained non-negative matrix factorization to find neurons/components
    (locations) and their fluorescence traces (activity) in a timeseries of images, and
    deconvolves them using an autoregressive model of the calcium impulse response
    function. See Pnevmatikakis et al., 2016 for details.

    Default values work alright for somatic images.

    :param np.array scan: 3-dimensional scan (image_height, image_width, num_frames).
    :param int num_components: An estimate of neurons/spatial components in the scan.
    :param int AR_order: Order of the autoregressive process used to model the impulse
        response function, e.g., 0 = no modelling; 2 = model rise plus exponential decay.
    :param int merge_threshold: Maximal temporal correlation allowed between activity of
        overlapping components before merging them.
    :param int num_processes: Number of processes to run in parallel. None for as many
        processes as available cores.
    :param int num_pixels_per_process: Number of pixels that a process handles each
        iteration.
    :param int block_size: 'number of pixels to process at the same time for dot product'
    :param int num_background_components: Number of background components to use.
    :param string init_method: Initialization method for the components.
        'greedy_roi':Look for a gaussian-shaped patch, apply rank-1 NMF, store components,
            calculate residual scan and repeat for num_components.
        'sparse_nmf': Regularized non-negative matrix factorization (as impl. in sklearn)
        'local_nmf': ...
    :param (float, float) soma_radius: Estimated neuron radius (in pixels) in y and x.
        Used in'greedy_roi' initialization to define the size of the gaussian window.
    :param int snmf_alpha: Regularization parameter (alpha) for the sparse NMF (if used).
    :param bool init_on_patches: If True, run the initialization methods on small patches
        of the scan rather than on the whole image.
    :param int patch_downsampling_factor: Division to the image dimensions to obtain patch
        dimensions, e.g., if original size is 256 and factor is 10, patches will be 26x26
    :param int percentage_of_patch_overlap: Patches are sampled in a sliding window. This
        controls how much overlap is between adjacent patches (0 for none, 0.9 for 90%)

    :returns Location matrix (image_height x image_width x num_components). Inferred
        location of each component.
    :returns Activity matrix (num_components x num_frames). Inferred fluorescence traces
        (spike train convolved with the fitted impulse response function).
    :returns: Inferred location matrix for background components (image_height x
        image_width x num_background_components).
    :returns: Inferred activity matrix for background components (image_height x
        image_width x num_background_components).
    :returns: Raw fluorescence traces (num_components x num_frames) obtained from the
        scan minus activity from background and other components.
    :returns: Spike matrix (num_components x num_frames). Deconvolved spike activity.
    :returns: Autoregressive process coefficients (num_components x AR_order) used to
        model the calcium impulse response of each component:
            c(t) = c(t-1) * AR_coeffs[0] + c(t-2) * AR_coeffs[1] + ...

    ..note:: Based on code provided by Andrea Giovanucci.
    ..note:: The produced number of components is not exactly what you ask for because
        some components will be merged or deleted.
    ..warning:: Computation- and memory-intensive for big scans.
    ..note:: NOTE(review): when init_on_patches=True, patch_downsampling_factor and
        percentage_of_patch_overlap must be provided (both default to None and are
        used in arithmetic below) — confirm with callers.
    """
    import caiman
    from caiman.source_extraction.cnmf import cnmf

    # Save as memory mapped file in F order (that's how caiman wants it)
    mmap_filename = _save_as_memmap(scan, base_name='/tmp/caiman', order='F').filename

    # 'Load' scan: memory-mapped, so frames are read lazily from disk.
    mmap_scan, (image_height, image_width), num_frames = caiman.load_memmap(mmap_filename)
    images = np.reshape(mmap_scan.T, (num_frames, image_height, image_width), order='F')

    # Start the ipyparallel cluster
    client, direct_view, num_processes = caiman.cluster.setup_cluster(
        n_processes=num_processes)

    # Optionally, run the initialization method in small patches to initialize components
    initial_A = None
    initial_C = None
    initial_f = None
    if init_on_patches:
        # Calculate patch size (only square patches allowed)
        bigger_dimension = max(image_height, image_width)
        smaller_dimension = min(image_height, image_width)
        patch_size = bigger_dimension / patch_downsampling_factor
        patch_size = min(patch_size, smaller_dimension)  # if bigger than small dimension

        # Calculate num_components_per_patch
        num_nonoverlapping_patches = (image_height/patch_size) * (image_width/patch_size)
        num_components_per_patch = num_components / num_nonoverlapping_patches
        num_components_per_patch = max(num_components_per_patch, 1)  # at least 1

        # Calculate patch overlap in pixels
        overlap_in_pixels = patch_size * percentage_of_patch_overlap

        # Make sure they are integers
        patch_size = int(round(patch_size))
        num_components_per_patch = int(round(num_components_per_patch))
        overlap_in_pixels = int(round(overlap_in_pixels))

        # Run CNMF on patches (only for initialization, no impulse response modelling p=0)
        model = cnmf.CNMF(num_processes, only_init_patch=True, p=0,
                          rf=int(round(patch_size / 2)), stride=overlap_in_pixels,
                          k=num_components_per_patch, merge_thresh=merge_threshold,
                          method_init=init_method, gSig=soma_radius,
                          alpha_snmf=snmf_alpha, gnb=num_background_components,
                          n_pixels_per_process=num_pixels_per_process,
                          block_size=block_size, check_nan=False, dview=direct_view,
                          method_deconvolution='cvxpy')
        model = model.fit(images)

        # Delete log files (one per patch)
        log_files = glob.glob('caiman*_LOG_*')
        for log_file in log_files:
            os.remove(log_file)

        # Get results to seed the full-image run below.
        initial_A = model.A
        initial_C = model.C
        initial_f = model.f

    # Run CNMF on the full image, seeded with the patch results if available.
    model = cnmf.CNMF(num_processes, k=num_components, p=AR_order,
                      merge_thresh=merge_threshold, gnb=num_background_components,
                      method_init=init_method, gSig=soma_radius, alpha_snmf=snmf_alpha,
                      n_pixels_per_process=num_pixels_per_process, block_size=block_size,
                      check_nan=False, dview=direct_view, Ain=initial_A, Cin=initial_C,
                      f_in=initial_f, method_deconvolution='cvxpy')
    model = model.fit(images)

    # Get final results
    location_matrix = model.A  # pixels x num_components
    activity_matrix = model.C  # num_components x num_frames
    background_location_matrix = model.b  # pixels x num_background_components
    background_activity_matrix = model.f  # num_background_components x num_frames
    spikes = model.S  # num_components x num_frames, spike_ traces
    raw_traces = model.C + model.YrA  # num_components x num_frames
    AR_coefficients = model.g  # AR_order x num_components

    # Reshape spatial matrices to be image_height x image_width x num_frames
    new_shape = (image_height, image_width, -1)
    location_matrix = location_matrix.toarray().reshape(new_shape, order='F')
    background_location_matrix = background_location_matrix.reshape(new_shape, order='F')
    AR_coefficients = np.array(list(AR_coefficients))  # unwrapping it (num_components x 2)

    # Stop ipyparallel cluster
    client.close()
    caiman.stop_server()

    # Delete memory mapped scan
    os.remove(mmap_filename)

    return (location_matrix, activity_matrix, background_location_matrix,
            background_activity_matrix, raw_traces, spikes, AR_coefficients)
|
cajal/pipeline
|
python/pipeline/utils/caiman_interface.py
|
Python
|
lgpl-3.0
| 32,356
|
[
"Gaussian",
"NEURON"
] |
db9d3b2139eea713b955f84ce0de3666b543864ee7e12d3ecd087e6443c291ac
|
import netCDF4 as nc4
import numpy as np

# Precision used for every model variable in the input file.
float_type = "f8"

# Vertical grid: kmax cells over a domain of height zsize.
kmax = 512
zsize = 0.5
dz = zsize / kmax

# Cell-center heights: z[k] = (k + 0.5) * dz.
z = np.linspace(0.5*dz, zsize-0.5*dz, kmax)

# Buoyancy profile for the Rayleigh-Taylor setup: unit buoyancy in the
# lower half of the domain, zero above.
b = np.zeros_like(z)
b[:kmax // 2] = 1.

# Write the initialization profiles to the NetCDF input file.
# clobber=False: refuse to overwrite an existing input file.
nc_file = nc4.Dataset('rayleightaylor_input.nc', mode='w', datamodel='NETCDF4', clobber=False)
nc_file.createDimension('z', kmax)

height_var = nc_file.createVariable('z', float_type, ('z'))
height_var[:] = z[:]

init_group = nc_file.createGroup('init')
buoyancy_var = init_group.createVariable('b', float_type, ('z'))
buoyancy_var[:] = b[:]

nc_file.close()
|
microhh/microhh2
|
cases/rayleightaylor/rayleightaylor_input.py
|
Python
|
gpl-3.0
| 615
|
[
"NetCDF"
] |
7dacda58ad5ed11453ca56375becc3c5145c6f4a0f6b5a157780b7cf3f53fb8d
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for MLlib Python DataFrame-based APIs.
"""
import sys
# Python 2/3 compatibility: under Python 3 expose the Python-2 names used
# throughout the tests below.
if sys.version > '3':
    xrange = range
    basestring = str

# xmlrunner is optional; tests fall back to the plain runner when missing.
try:
    import xmlrunner
except ImportError:
    xmlrunner = None

# Python <= 2.6 needs the unittest2 backport for the assertions used here.
if sys.version_info[:2] <= (2, 6):
    try:
        import unittest2 as unittest
    except ImportError:
        sys.stderr.write('Please install unittest2 to test with Python 2.6 or earlier')
        sys.exit(1)
else:
    import unittest
from shutil import rmtree
import tempfile
import array as pyarray
import numpy as np
from numpy import abs, all, arange, array, array_equal, inf, ones, tile, zeros
import inspect
from pyspark import keyword_only, SparkContext
from pyspark.ml import Estimator, Model, Pipeline, PipelineModel, Transformer, UnaryTransformer
from pyspark.ml.classification import *
from pyspark.ml.clustering import *
from pyspark.ml.common import _java2py, _py2java
from pyspark.ml.evaluation import BinaryClassificationEvaluator, \
MulticlassClassificationEvaluator, RegressionEvaluator
from pyspark.ml.feature import *
from pyspark.ml.fpm import FPGrowth, FPGrowthModel
from pyspark.ml.image import ImageSchema
from pyspark.ml.linalg import DenseMatrix, DenseMatrix, DenseVector, Matrices, MatrixUDT, \
SparseMatrix, SparseVector, Vector, VectorUDT, Vectors
from pyspark.ml.param import Param, Params, TypeConverters
from pyspark.ml.param.shared import HasInputCol, HasMaxIter, HasSeed
from pyspark.ml.recommendation import ALS
from pyspark.ml.regression import DecisionTreeRegressor, GeneralizedLinearRegression, \
LinearRegression
from pyspark.ml.stat import ChiSquareTest
from pyspark.ml.tuning import *
from pyspark.ml.util import *
from pyspark.ml.wrapper import JavaParams, JavaWrapper
from pyspark.serializers import PickleSerializer
from pyspark.sql import DataFrame, Row, SparkSession
from pyspark.sql.functions import rand
from pyspark.sql.types import DoubleType, IntegerType
from pyspark.storagelevel import *
from pyspark.tests import ReusedPySparkTestCase as PySparkTestCase
# Module-level serializer instance shared by the test suite.
ser = PickleSerializer()
class MLlibTestCase(unittest.TestCase):
    """Base case creating a fresh local SparkContext/SparkSession per test."""

    def setUp(self):
        # Four local worker threads are enough for these unit tests.
        self.sc = SparkContext('local[4]', "MLlib tests")
        self.spark = SparkSession(self.sc)

    def tearDown(self):
        # Stopping the session also stops the underlying context.
        self.spark.stop()
class SparkSessionTestCase(PySparkTestCase):
    """Test case that shares one SparkSession across all tests in the class."""

    @classmethod
    def setUpClass(cls):
        PySparkTestCase.setUpClass()
        # Reuse the context created by PySparkTestCase for the session.
        cls.spark = SparkSession(cls.sc)

    @classmethod
    def tearDownClass(cls):
        PySparkTestCase.tearDownClass()
        cls.spark.stop()
class MockDataset(DataFrame):
    """DataFrame stub with a counter recording how many stages touched it."""

    def __init__(self):
        # NOTE(review): deliberately does not call DataFrame.__init__; only
        # 'index' is used by the mock transformers/estimators below.
        self.index = 0
class HasFake(Params):
    """Params mixin exposing a single dummy 'fake' param for pipeline tests."""

    def __init__(self):
        super(HasFake, self).__init__()
        self.fake = Param(self, "fake", "fake param")

    def getFake(self):
        # Raises if 'fake' is neither set nor given a default.
        return self.getOrDefault(self.fake)
class MockTransformer(Transformer, HasFake):
    """Transformer stub that records the dataset index it saw and bumps it."""

    def __init__(self):
        super(MockTransformer, self).__init__()
        # Index of the dataset at the time _transform was last called.
        self.dataset_index = None

    def _transform(self, dataset):
        self.dataset_index = dataset.index
        # Mutate the counter so tests can verify call order across stages.
        dataset.index += 1
        return dataset
class MockUnaryTransformer(UnaryTransformer, DefaultParamsReadable, DefaultParamsWritable):
    """Minimal UnaryTransformer that adds a constant shift to each value."""

    # Amount added to every input value by the transform function.
    shift = Param(Params._dummy(), "shift", "The amount by which to shift " +
                  "data in a DataFrame",
                  typeConverter=TypeConverters.toFloat)

    def __init__(self, shiftVal=1):
        super(MockUnaryTransformer, self).__init__()
        self._setDefault(shift=1)
        self._set(shift=shiftVal)

    def getShift(self):
        return self.getOrDefault(self.shift)

    def setShift(self, shift):
        self._set(shift=shift)

    def createTransformFunc(self):
        # Capture the current shift value; the closure is applied per element.
        shiftVal = self.getShift()
        return lambda x: x + shiftVal

    def outputDataType(self):
        return DoubleType()

    def validateInputType(self, inputType):
        # Only double-typed input columns are accepted.
        if inputType != DoubleType():
            raise TypeError("Bad input type: {}. ".format(inputType) +
                            "Requires Double.")
class MockEstimator(Estimator, HasFake):
    """Estimator stub that records the dataset index and returns a MockModel."""

    def __init__(self):
        super(MockEstimator, self).__init__()
        # Index of the dataset at the time _fit was last called.
        self.dataset_index = None

    def _fit(self, dataset):
        self.dataset_index = dataset.index
        model = MockModel()
        # Copy this estimator's param values onto the produced model.
        self._copyValues(model)
        return model
class MockModel(MockTransformer, Model, HasFake):
    """Model stub returned by MockEstimator; transforms like MockTransformer."""
    pass
class ParamTypeConversionTests(PySparkTestCase):
    """
    Test that param type conversion happens.
    """

    def test_int(self):
        # Float values that are whole numbers convert to int; others raise.
        lr = LogisticRegression(maxIter=5.0)
        self.assertEqual(lr.getMaxIter(), 5)
        self.assertTrue(type(lr.getMaxIter()) == int)
        self.assertRaises(TypeError, lambda: LogisticRegression(maxIter="notAnInt"))
        self.assertRaises(TypeError, lambda: LogisticRegression(maxIter=5.1))

    def test_float(self):
        # Ints widen to float transparently; non-numeric strings raise.
        lr = LogisticRegression(tol=1)
        self.assertEqual(lr.getTol(), 1.0)
        self.assertTrue(type(lr.getTol()) == float)
        self.assertRaises(TypeError, lambda: LogisticRegression(tol="notAFloat"))

    def test_vector(self):
        # Lists and numpy arrays both convert to DenseVector.
        ewp = ElementwiseProduct(scalingVec=[1, 3])
        self.assertEqual(ewp.getScalingVec(), DenseVector([1.0, 3.0]))
        ewp = ElementwiseProduct(scalingVec=np.array([1.2, 3.4]))
        self.assertEqual(ewp.getScalingVec(), DenseVector([1.2, 3.4]))
        self.assertRaises(TypeError, lambda: ElementwiseProduct(scalingVec=["a", "b"]))

    def test_list(self):
        # Every list-like type accepted by toList converts to a plain list.
        l = [0, 1]
        for lst_like in [l, np.array(l), DenseVector(l), SparseVector(len(l),
                         range(len(l)), l), pyarray.array('l', l), xrange(2), tuple(l)]:
            converted = TypeConverters.toList(lst_like)
            self.assertEqual(type(converted), list)
            self.assertListEqual(converted, l)

    def test_list_int(self):
        # Whole-number floats in any sequence type convert to a list of ints.
        for indices in [[1.0, 2.0], np.array([1.0, 2.0]), DenseVector([1.0, 2.0]),
                        SparseVector(2, {0: 1.0, 1: 2.0}), xrange(1, 3), (1.0, 2.0),
                        pyarray.array('d', [1.0, 2.0])]:
            vs = VectorSlicer(indices=indices)
            self.assertListEqual(vs.getIndices(), [1, 2])
            self.assertTrue(all([type(v) == int for v in vs.getIndices()]))
        self.assertRaises(TypeError, lambda: VectorSlicer(indices=["a", "b"]))

    def test_list_float(self):
        b = Bucketizer(splits=[1, 4])
        self.assertEqual(b.getSplits(), [1.0, 4.0])
        self.assertTrue(all([type(v) == float for v in b.getSplits()]))
        self.assertRaises(TypeError, lambda: Bucketizer(splits=["a", 1.0]))

    def test_list_string(self):
        # str and unicode labels both normalize to a list of strings.
        for labels in [np.array(['a', u'b']), ['a', u'b'], np.array(['a', 'b'])]:
            idx_to_string = IndexToString(labels=labels)
            self.assertListEqual(idx_to_string.getLabels(), ['a', 'b'])
        self.assertRaises(TypeError, lambda: IndexToString(labels=['a', 2]))

    def test_string(self):
        lr = LogisticRegression()
        for col in ['features', u'features', np.str_('features')]:
            lr.setFeaturesCol(col)
            self.assertEqual(lr.getFeaturesCol(), 'features')
        self.assertRaises(TypeError, lambda: LogisticRegression(featuresCol=2.3))

    def test_bool(self):
        # Booleans are strict: neither ints nor strings are coerced.
        self.assertRaises(TypeError, lambda: LogisticRegression(fitIntercept=1))
        self.assertRaises(TypeError, lambda: LogisticRegression(fitIntercept="false"))
class PipelineTests(PySparkTestCase):
    """Tests for Pipeline fit/transform stage ordering using the mock stages."""

    def test_pipeline(self):
        dataset = MockDataset()
        estimator0 = MockEstimator()
        transformer1 = MockTransformer()
        estimator2 = MockEstimator()
        transformer3 = MockTransformer()
        pipeline = Pipeline(stages=[estimator0, transformer1, estimator2, transformer3])
        # Fit with a param map targeting the first two stages.
        pipeline_model = pipeline.fit(dataset, {estimator0.fake: 0, transformer1.fake: 1})
        model0, transformer1, model2, transformer3 = pipeline_model.stages
        # During fit, each intermediate stage sees the dataset once, in order.
        self.assertEqual(0, model0.dataset_index)
        self.assertEqual(0, model0.getFake())
        self.assertEqual(1, transformer1.dataset_index)
        self.assertEqual(1, transformer1.getFake())
        self.assertEqual(2, dataset.index)
        self.assertIsNone(model2.dataset_index, "The last model shouldn't be called in fit.")
        self.assertIsNone(transformer3.dataset_index,
                          "The last transformer shouldn't be called in fit.")
        # During transform, all four fitted stages run, continuing the count.
        dataset = pipeline_model.transform(dataset)
        self.assertEqual(2, model0.dataset_index)
        self.assertEqual(3, transformer1.dataset_index)
        self.assertEqual(4, model2.dataset_index)
        self.assertEqual(5, transformer3.dataset_index)
        self.assertEqual(6, dataset.index)

    def test_identity_pipeline(self):
        dataset = MockDataset()

        def doTransform(pipeline):
            pipeline_model = pipeline.fit(dataset)
            return pipeline_model.transform(dataset)
        # check that empty pipeline did not perform any transformation
        self.assertEqual(dataset.index, doTransform(Pipeline(stages=[])).index)
        # check that failure to set stages param will raise KeyError for missing param
        self.assertRaises(KeyError, lambda: doTransform(Pipeline()))
class TestParams(HasMaxIter, HasInputCol, HasSeed):
    """
    A subclass of Params mixed with HasMaxIter, HasInputCol and HasSeed.
    """
    @keyword_only
    def __init__(self, seed=None):
        super(TestParams, self).__init__()
        # maxIter gets a default; inputCol deliberately stays undefined.
        self._setDefault(maxIter=10)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    def setParams(self, seed=None):
        """
        setParams(self, seed=None)
        Sets params for this test.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
class OtherTestParams(HasMaxIter, HasInputCol, HasSeed):
    """
    A subclass of Params mixed with HasMaxIter, HasInputCol and HasSeed.
    """
    # Structurally identical to TestParams; exists so tests can check that
    # distinct classes get distinct derived values (e.g. random seeds).
    @keyword_only
    def __init__(self, seed=None):
        super(OtherTestParams, self).__init__()
        self._setDefault(maxIter=10)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    def setParams(self, seed=None):
        """
        setParams(self, seed=None)
        Sets params for this test.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
class HasThrowableProperty(Params):
    """Params subclass whose property raises; used to verify that Params.params
    does not evaluate unrelated instance properties."""

    def __init__(self):
        super(HasThrowableProperty, self).__init__()
        self.p = Param(self, "none", "empty param")

    @property
    def test_property(self):
        raise RuntimeError("Test property to raise error when invoked")
class ParamTests(PySparkTestCase):
    """Tests for Param/Params behavior: copying, resolution, defaults,
    explain strings, and Python/Java param parity."""

    def test_copy_new_parent(self):
        testParams = TestParams()
        # Copying an instantiated param should fail
        with self.assertRaises(ValueError):
            testParams.maxIter._copy_new_parent(testParams)
        # Copying a dummy param should succeed
        TestParams.maxIter._copy_new_parent(testParams)
        maxIter = testParams.maxIter
        self.assertEqual(maxIter.name, "maxIter")
        self.assertEqual(maxIter.doc, "max number of iterations (>= 0).")
        self.assertTrue(maxIter.parent == testParams.uid)

    def test_param(self):
        testParams = TestParams()
        maxIter = testParams.maxIter
        self.assertEqual(maxIter.name, "maxIter")
        self.assertEqual(maxIter.doc, "max number of iterations (>= 0).")
        self.assertTrue(maxIter.parent == testParams.uid)

    def test_hasparam(self):
        testParams = TestParams()
        self.assertTrue(all([testParams.hasParam(p.name) for p in testParams.params]))
        self.assertFalse(testParams.hasParam("notAParameter"))
        # Unicode param names must be accepted too.
        self.assertTrue(testParams.hasParam(u"maxIter"))

    def test_resolveparam(self):
        testParams = TestParams()
        # A Param instance, a str name, and a unicode name all resolve equally.
        self.assertEqual(testParams._resolveParam(testParams.maxIter), testParams.maxIter)
        self.assertEqual(testParams._resolveParam("maxIter"), testParams.maxIter)
        self.assertEqual(testParams._resolveParam(u"maxIter"), testParams.maxIter)
        if sys.version_info[0] >= 3:
            # In Python 3, it is allowed to get/set attributes with non-ascii characters.
            e_cls = AttributeError
        else:
            e_cls = UnicodeEncodeError
        self.assertRaises(e_cls, lambda: testParams._resolveParam(u"아"))

    def test_params(self):
        testParams = TestParams()
        maxIter = testParams.maxIter
        inputCol = testParams.inputCol
        seed = testParams.seed

        # params is sorted by name.
        params = testParams.params
        self.assertEqual(params, [inputCol, maxIter, seed])

        # maxIter: has a default, becomes "set" only after an explicit setter.
        self.assertTrue(testParams.hasParam(maxIter.name))
        self.assertTrue(testParams.hasDefault(maxIter))
        self.assertFalse(testParams.isSet(maxIter))
        self.assertTrue(testParams.isDefined(maxIter))
        self.assertEqual(testParams.getMaxIter(), 10)
        testParams.setMaxIter(100)
        self.assertTrue(testParams.isSet(maxIter))
        self.assertEqual(testParams.getMaxIter(), 100)

        # inputCol: no default and never set, so reading it raises.
        self.assertTrue(testParams.hasParam(inputCol.name))
        self.assertFalse(testParams.hasDefault(inputCol))
        self.assertFalse(testParams.isSet(inputCol))
        self.assertFalse(testParams.isDefined(inputCol))
        with self.assertRaises(KeyError):
            testParams.getInputCol()

        otherParam = Param(Params._dummy(), "otherParam", "Parameter used to test that " +
                           "set raises an error for a non-member parameter.",
                           typeConverter=TypeConverters.toString)
        with self.assertRaises(ValueError):
            testParams.set(otherParam, "value")

        # Since the default is normally random, set it to a known number for debug str
        testParams._setDefault(seed=41)
        testParams.setSeed(43)

        self.assertEqual(
            testParams.explainParams(),
            "\n".join(["inputCol: input column name. (undefined)",
                       "maxIter: max number of iterations (>= 0). (default: 10, current: 100)",
                       "seed: random seed. (default: 41, current: 43)"]))

    def test_kmeans_param(self):
        algo = KMeans()
        self.assertEqual(algo.getInitMode(), "k-means||")
        algo.setK(10)
        self.assertEqual(algo.getK(), 10)
        algo.setInitSteps(10)
        self.assertEqual(algo.getInitSteps(), 10)

    def test_hasseed(self):
        noSeedSpecd = TestParams()
        withSeedSpecd = TestParams(seed=42)
        other = OtherTestParams()
        # Check that we no longer use 42 as the magic number
        self.assertNotEqual(noSeedSpecd.getSeed(), 42)
        origSeed = noSeedSpecd.getSeed()
        # Check that we only compute the seed once
        self.assertEqual(noSeedSpecd.getSeed(), origSeed)
        # Check that a specified seed is honored
        self.assertEqual(withSeedSpecd.getSeed(), 42)
        # Check that a different class has a different seed
        self.assertNotEqual(other.getSeed(), noSeedSpecd.getSeed())

    def test_param_property_error(self):
        param_store = HasThrowableProperty()
        self.assertRaises(RuntimeError, lambda: param_store.test_property)
        params = param_store.params  # should not invoke the property 'test_property'
        self.assertEqual(len(params), 1)

    def test_word2vec_param(self):
        model = Word2Vec().setWindowSize(6)
        # Check windowSize is set properly
        self.assertEqual(model.getWindowSize(), 6)

    def test_copy_param_extras(self):
        tp = TestParams(seed=42)
        extra = {tp.getParam(TestParams.inputCol.name): "copy_input"}
        tp_copy = tp.copy(extra=extra)
        # The copy keeps the uid and param list, gains the extra values, and
        # otherwise mirrors the original's param/default maps exactly.
        self.assertEqual(tp.uid, tp_copy.uid)
        self.assertEqual(tp.params, tp_copy.params)
        for k, v in extra.items():
            self.assertTrue(tp_copy.isDefined(k))
            self.assertEqual(tp_copy.getOrDefault(k), v)
        copied_no_extra = {}
        for k, v in tp_copy._paramMap.items():
            if k not in extra:
                copied_no_extra[k] = v
        self.assertEqual(tp._paramMap, copied_no_extra)
        self.assertEqual(tp._defaultParamMap, tp_copy._defaultParamMap)

    def test_logistic_regression_check_thresholds(self):
        # Consistent threshold/thresholds pair constructs fine...
        self.assertIsInstance(
            LogisticRegression(threshold=0.5, thresholds=[0.5, 0.5]),
            LogisticRegression
        )

        # ...an inconsistent pair raises at construction time.
        self.assertRaisesRegexp(
            ValueError,
            "Logistic Regression getThreshold found inconsistent.*$",
            LogisticRegression, threshold=0.42, thresholds=[0.5, 0.5]
        )

    @staticmethod
    def check_params(test_self, py_stage, check_params_exist=True):
        """
        Checks common requirements for Params.params:
          - set of params exist in Java and Python and are ordered by names
          - param parent has the same UID as the object's UID
          - default param value from Java matches value in Python
          - optionally check if all params from Java also exist in Python
        """
        py_stage_str = "%s %s" % (type(py_stage), py_stage)
        # Pure-Python stages have no Java counterpart: nothing to check.
        if not hasattr(py_stage, "_to_java"):
            return
        java_stage = py_stage._to_java()
        if java_stage is None:
            return
        test_self.assertEqual(py_stage.uid, java_stage.uid(), msg=py_stage_str)
        if check_params_exist:
            param_names = [p.name for p in py_stage.params]
            java_params = list(java_stage.params())
            java_param_names = [jp.name() for jp in java_params]
            test_self.assertEqual(
                param_names, sorted(java_param_names),
                "Param list in Python does not match Java for %s:\nJava = %s\nPython = %s"
                % (py_stage_str, java_param_names, param_names))
        for p in py_stage.params:
            test_self.assertEqual(p.parent, py_stage.uid)
            java_param = java_stage.getParam(p.name)
            py_has_default = py_stage.hasDefault(p)
            java_has_default = java_stage.hasDefault(java_param)
            test_self.assertEqual(py_has_default, java_has_default,
                                  "Default value mismatch of param %s for Params %s"
                                  % (p.name, str(py_stage)))
            if py_has_default:
                if p.name == "seed":
                    continue  # Random seeds between Spark and PySpark are different
                java_default = _java2py(test_self.sc,
                                        java_stage.clear(java_param).getOrDefault(java_param))
                py_stage._clear(p)
                py_default = py_stage.getOrDefault(p)
                # equality test for NaN is always False
                if isinstance(java_default, float) and np.isnan(java_default):
                    java_default = "NaN"
                    py_default = "NaN" if np.isnan(py_default) else "not NaN"
                test_self.assertEqual(
                    java_default, py_default,
                    "Java default %s != python default %s of param %s for Params %s"
                    % (str(java_default), str(py_default), p.name, str(py_stage)))
class EvaluatorTests(SparkSessionTestCase):

    def test_java_params(self):
        """
        This tests a bug fixed by SPARK-18274 which causes multiple copies
        of a Params instance in Python to be linked to the same Java instance.
        """
        evaluator = RegressionEvaluator(metricName="r2")
        df = self.spark.createDataFrame([Row(label=1.0, prediction=1.1)])
        evaluator.evaluate(df)
        self.assertEqual(evaluator._java_obj.getMetricName(), "r2")
        # The copy must get its own Java object with the overridden metric...
        evaluatorCopy = evaluator.copy({evaluator.metricName: "mae"})
        evaluator.evaluate(df)
        evaluatorCopy.evaluate(df)
        # ...while the original's Java-side metric stays untouched.
        self.assertEqual(evaluator._java_obj.getMetricName(), "r2")
        self.assertEqual(evaluatorCopy._java_obj.getMetricName(), "mae")
# Tests for pyspark.ml.feature components: Binarizer, IDF, NGram,
# StopWordsRemover, CountVectorizer, RFormula and StringIndexer.
class FeatureTests(SparkSessionTestCase):
# Binarizer: default params, setParams/setThreshold, copy() with a param
# override (same uid, new threshold), and keyword-argument construction.
def test_binarizer(self):
b0 = Binarizer()
self.assertListEqual(b0.params, [b0.inputCol, b0.outputCol, b0.threshold])
self.assertTrue(all([~b0.isSet(p) for p in b0.params]))
self.assertTrue(b0.hasDefault(b0.threshold))
self.assertEqual(b0.getThreshold(), 0.0)
b0.setParams(inputCol="input", outputCol="output").setThreshold(1.0)
self.assertTrue(all([b0.isSet(p) for p in b0.params]))
self.assertEqual(b0.getThreshold(), 1.0)
self.assertEqual(b0.getInputCol(), "input")
self.assertEqual(b0.getOutputCol(), "output")
# copy() keeps the uid and params list but applies the extra param map.
b0c = b0.copy({b0.threshold: 2.0})
self.assertEqual(b0c.uid, b0.uid)
self.assertListEqual(b0c.params, b0.params)
self.assertEqual(b0c.getThreshold(), 2.0)
# A freshly constructed instance gets a distinct uid.
b1 = Binarizer(threshold=2.0, inputCol="input", outputCol="output")
self.assertNotEqual(b1.uid, b0.uid)
self.assertEqual(b1.getThreshold(), 2.0)
self.assertEqual(b1.getInputCol(), "input")
self.assertEqual(b1.getOutputCol(), "output")
# IDF: fitting with an extra param map sets outputCol, the fitted model
# inherits the estimator's uid, and transform produces a non-null column.
def test_idf(self):
dataset = self.spark.createDataFrame([
(DenseVector([1.0, 2.0]),),
(DenseVector([0.0, 1.0]),),
(DenseVector([3.0, 0.2]),)], ["tf"])
idf0 = IDF(inputCol="tf")
self.assertListEqual(idf0.params, [idf0.inputCol, idf0.minDocFreq, idf0.outputCol])
idf0m = idf0.fit(dataset, {idf0.outputCol: "idf"})
self.assertEqual(idf0m.uid, idf0.uid,
"Model should inherit the UID from its parent estimator.")
output = idf0m.transform(dataset)
self.assertIsNotNone(output.head().idf)
# Test that parameters transferred to Python Model
ParamTests.check_params(self, idf0m)
# NGram: n=4 over five tokens yields the two expected 4-grams.
def test_ngram(self):
dataset = self.spark.createDataFrame([
Row(input=["a", "b", "c", "d", "e"])])
ngram0 = NGram(n=4, inputCol="input", outputCol="output")
self.assertEqual(ngram0.getN(), 4)
self.assertEqual(ngram0.getInputCol(), "input")
self.assertEqual(ngram0.getOutputCol(), "output")
transformedDF = ngram0.transform(dataset)
self.assertEqual(transformedDF.head().output, ["a b c d", "b c d e"])
# StopWordsRemover: default stop-word list, custom list, and a
# language-specific default list (Turkish).
def test_stopwordsremover(self):
dataset = self.spark.createDataFrame([Row(input=["a", "panda"])])
stopWordRemover = StopWordsRemover(inputCol="input", outputCol="output")
# Default
self.assertEqual(stopWordRemover.getInputCol(), "input")
transformedDF = stopWordRemover.transform(dataset)
self.assertEqual(transformedDF.head().output, ["panda"])
self.assertEqual(type(stopWordRemover.getStopWords()), list)
self.assertTrue(isinstance(stopWordRemover.getStopWords()[0], basestring))
# Custom
stopwords = ["panda"]
stopWordRemover.setStopWords(stopwords)
self.assertEqual(stopWordRemover.getInputCol(), "input")
self.assertEqual(stopWordRemover.getStopWords(), stopwords)
transformedDF = stopWordRemover.transform(dataset)
self.assertEqual(transformedDF.head().output, ["a"])
# with language selection
stopwords = StopWordsRemover.loadDefaultStopWords("turkish")
dataset = self.spark.createDataFrame([Row(input=["acaba", "ama", "biri"])])
stopWordRemover.setStopWords(stopwords)
self.assertEqual(stopWordRemover.getStopWords(), stopwords)
transformedDF = stopWordRemover.transform(dataset)
self.assertEqual(transformedDF.head().output, [])
# CountVectorizer(binary=True): transformed vectors match the expected
# binary (0/1) sparse vectors carried alongside each row.
def test_count_vectorizer_with_binary(self):
dataset = self.spark.createDataFrame([
(0, "a a a b b c".split(' '), SparseVector(3, {0: 1.0, 1: 1.0, 2: 1.0}),),
(1, "a a".split(' '), SparseVector(3, {0: 1.0}),),
(2, "a b".split(' '), SparseVector(3, {0: 1.0, 1: 1.0}),),
(3, "c".split(' '), SparseVector(3, {2: 1.0}),)], ["id", "words", "expected"])
cv = CountVectorizer(binary=True, inputCol="words", outputCol="features")
model = cv.fit(dataset)
transformedList = model.transform(dataset).select("features", "expected").collect()
for r in transformedList:
feature, expected = r
self.assertEqual(feature, expected)
# RFormula: numeric labels are passed through by default, and
# setForceIndexLabel(True) makes the label be string-indexed instead.
def test_rformula_force_index_label(self):
df = self.spark.createDataFrame([
(1.0, 1.0, "a"),
(0.0, 2.0, "b"),
(1.0, 0.0, "a")], ["y", "x", "s"])
# Does not index label by default since it's numeric type.
rf = RFormula(formula="y ~ x + s")
model = rf.fit(df)
transformedDF = model.transform(df)
self.assertEqual(transformedDF.head().label, 1.0)
# Force to index label.
rf2 = RFormula(formula="y ~ x + s").setForceIndexLabel(True)
model2 = rf2.fit(df)
transformedDF2 = model2.transform(df)
self.assertEqual(transformedDF2.head().label, 0.0)
# RFormula with stringIndexerOrderType="alphabetDesc": categorical column
# "s" is encoded per the descending alphabetical index order.
def test_rformula_string_indexer_order_type(self):
df = self.spark.createDataFrame([
(1.0, 1.0, "a"),
(0.0, 2.0, "b"),
(1.0, 0.0, "a")], ["y", "x", "s"])
rf = RFormula(formula="y ~ x + s", stringIndexerOrderType="alphabetDesc")
self.assertEqual(rf.getStringIndexerOrderType(), 'alphabetDesc')
transformedDF = rf.fit(df).transform(df)
observed = transformedDF.select("features").collect()
expected = [[1.0, 0.0], [2.0, 1.0], [0.0, 0.0]]
for i in range(0, len(expected)):
self.assertTrue(all(observed[i]["features"].toArray() == expected[i]))
# StringIndexer handleInvalid: "keep" assigns an index to unseen/None rows,
# "skip" drops them from the output.
def test_string_indexer_handle_invalid(self):
df = self.spark.createDataFrame([
(0, "a"),
(1, "d"),
(2, None)], ["id", "label"])
si1 = StringIndexer(inputCol="label", outputCol="indexed", handleInvalid="keep",
stringOrderType="alphabetAsc")
model1 = si1.fit(df)
td1 = model1.transform(df)
actual1 = td1.select("id", "indexed").collect()
expected1 = [Row(id=0, indexed=0.0), Row(id=1, indexed=1.0), Row(id=2, indexed=2.0)]
self.assertEqual(actual1, expected1)
si2 = si1.setHandleInvalid("skip")
model2 = si2.fit(df)
td2 = model2.transform(df)
actual2 = td2.select("id", "indexed").collect()
expected2 = [Row(id=0, indexed=0.0), Row(id=1, indexed=1.0)]
self.assertEqual(actual2, expected2)
class HasInducedError(Params):
def __init__(self):
super(HasInducedError, self).__init__()
self.inducedError = Param(self, "inducedError",
"Uniformly-distributed error added to feature")
def getInducedError(self):
return self.getOrDefault(self.inducedError)
class InducedErrorModel(Model, HasInducedError):
def __init__(self):
super(InducedErrorModel, self).__init__()
def _transform(self, dataset):
return dataset.withColumn("prediction",
dataset.feature + (rand(0) * self.getInducedError()))
class InducedErrorEstimator(Estimator, HasInducedError):
def __init__(self, inducedError=1.0):
super(InducedErrorEstimator, self).__init__()
self._set(inducedError=inducedError)
def _fit(self, dataset):
model = InducedErrorModel()
self._copyValues(model)
return model
# Tests for CrossValidator / CrossValidatorModel: copy semantics, metric
# optimization direction, persistence (simple and nested estimators), and
# parallel evaluation.
class CrossValidatorTests(SparkSessionTestCase):
# copy() of CrossValidator keeps the estimator uid; copied model keeps
# (approximately equal) avgMetrics.
def test_copy(self):
dataset = self.spark.createDataFrame([
(10, 10.0),
(50, 50.0),
(100, 100.0),
(500, 500.0)] * 10,
["feature", "label"])
iee = InducedErrorEstimator()
evaluator = RegressionEvaluator(metricName="rmse")
grid = (ParamGridBuilder()
.addGrid(iee.inducedError, [100.0, 0.0, 10000.0])
.build())
cv = CrossValidator(estimator=iee, estimatorParamMaps=grid, evaluator=evaluator)
cvCopied = cv.copy()
self.assertEqual(cv.getEstimator().uid, cvCopied.getEstimator().uid)
cvModel = cv.fit(dataset)
cvModelCopied = cvModel.copy()
for index in range(len(cvModel.avgMetrics)):
self.assertTrue(abs(cvModel.avgMetrics[index] - cvModelCopied.avgMetrics[index])
< 0.0001)
# With RMSE (lower is better) the best model should be the zero-error one.
def test_fit_minimize_metric(self):
dataset = self.spark.createDataFrame([
(10, 10.0),
(50, 50.0),
(100, 100.0),
(500, 500.0)] * 10,
["feature", "label"])
iee = InducedErrorEstimator()
evaluator = RegressionEvaluator(metricName="rmse")
grid = (ParamGridBuilder()
.addGrid(iee.inducedError, [100.0, 0.0, 10000.0])
.build())
cv = CrossValidator(estimator=iee, estimatorParamMaps=grid, evaluator=evaluator)
cvModel = cv.fit(dataset)
bestModel = cvModel.bestModel
bestModelMetric = evaluator.evaluate(bestModel.transform(dataset))
self.assertEqual(0.0, bestModel.getOrDefault('inducedError'),
"Best model should have zero induced error")
self.assertEqual(0.0, bestModelMetric, "Best model has RMSE of 0")
# With r2 (higher is better) the best model is still the zero-error one.
def test_fit_maximize_metric(self):
dataset = self.spark.createDataFrame([
(10, 10.0),
(50, 50.0),
(100, 100.0),
(500, 500.0)] * 10,
["feature", "label"])
iee = InducedErrorEstimator()
evaluator = RegressionEvaluator(metricName="r2")
grid = (ParamGridBuilder()
.addGrid(iee.inducedError, [100.0, 0.0, 10000.0])
.build())
cv = CrossValidator(estimator=iee, estimatorParamMaps=grid, evaluator=evaluator)
cvModel = cv.fit(dataset)
bestModel = cvModel.bestModel
bestModelMetric = evaluator.evaluate(bestModel.transform(dataset))
self.assertEqual(0.0, bestModel.getOrDefault('inducedError'),
"Best model should have zero induced error")
self.assertEqual(1.0, bestModelMetric, "Best model has R-squared of 1")
# Save/load of the best (trained) model only; CV persistence is separate.
def test_save_load_trained_model(self):
# This tests saving and loading the trained model only.
# Save/load for CrossValidator will be added later: SPARK-13786
temp_path = tempfile.mkdtemp()
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
lr = LogisticRegression()
grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1]).build()
evaluator = BinaryClassificationEvaluator()
cv = CrossValidator(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
cvModel = cv.fit(dataset)
lrModel = cvModel.bestModel
cvModelPath = temp_path + "/cvModel"
lrModel.save(cvModelPath)
loadedLrModel = LogisticRegressionModel.load(cvModelPath)
self.assertEqual(loadedLrModel.uid, lrModel.uid)
self.assertEqual(loadedLrModel.intercept, lrModel.intercept)
# Round-trip save/load of both CrossValidator and CrossValidatorModel,
# checking estimator/evaluator uids and the param-map grid survive.
def test_save_load_simple_estimator(self):
temp_path = tempfile.mkdtemp()
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
lr = LogisticRegression()
grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1]).build()
evaluator = BinaryClassificationEvaluator()
# test save/load of CrossValidator
cv = CrossValidator(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
cvModel = cv.fit(dataset)
cvPath = temp_path + "/cv"
cv.save(cvPath)
loadedCV = CrossValidator.load(cvPath)
self.assertEqual(loadedCV.getEstimator().uid, cv.getEstimator().uid)
self.assertEqual(loadedCV.getEvaluator().uid, cv.getEvaluator().uid)
self.assertEqual(loadedCV.getEstimatorParamMaps(), cv.getEstimatorParamMaps())
# test save/load of CrossValidatorModel
cvModelPath = temp_path + "/cvModel"
cvModel.save(cvModelPath)
loadedModel = CrossValidatorModel.load(cvModelPath)
self.assertEqual(loadedModel.bestModel.uid, cvModel.bestModel.uid)
# Fitting with parallelism=1 and parallelism=2 must give identical metrics.
def test_parallel_evaluation(self):
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
lr = LogisticRegression()
grid = ParamGridBuilder().addGrid(lr.maxIter, [5, 6]).build()
evaluator = BinaryClassificationEvaluator()
# test save/load of CrossValidator
cv = CrossValidator(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
cv.setParallelism(1)
cvSerialModel = cv.fit(dataset)
cv.setParallelism(2)
cvParallelModel = cv.fit(dataset)
self.assertEqual(cvSerialModel.avgMetrics, cvParallelModel.avgMetrics)
# Persistence when the tuned estimator is itself composite (OneVsRest with
# LogisticRegression classifiers in the grid): classifier params are
# compared by uid, other params by value.
def test_save_load_nested_estimator(self):
temp_path = tempfile.mkdtemp()
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
ova = OneVsRest(classifier=LogisticRegression())
lr1 = LogisticRegression().setMaxIter(100)
lr2 = LogisticRegression().setMaxIter(150)
grid = ParamGridBuilder().addGrid(ova.classifier, [lr1, lr2]).build()
evaluator = MulticlassClassificationEvaluator()
# test save/load of CrossValidator
cv = CrossValidator(estimator=ova, estimatorParamMaps=grid, evaluator=evaluator)
cvModel = cv.fit(dataset)
cvPath = temp_path + "/cv"
cv.save(cvPath)
loadedCV = CrossValidator.load(cvPath)
self.assertEqual(loadedCV.getEstimator().uid, cv.getEstimator().uid)
self.assertEqual(loadedCV.getEvaluator().uid, cv.getEvaluator().uid)
originalParamMap = cv.getEstimatorParamMaps()
loadedParamMap = loadedCV.getEstimatorParamMaps()
for i, param in enumerate(loadedParamMap):
for p in param:
if p.name == "classifier":
self.assertEqual(param[p].uid, originalParamMap[i][p].uid)
else:
self.assertEqual(param[p], originalParamMap[i][p])
# test save/load of CrossValidatorModel
cvModelPath = temp_path + "/cvModel"
cvModel.save(cvModelPath)
loadedModel = CrossValidatorModel.load(cvModelPath)
self.assertEqual(loadedModel.bestModel.uid, cvModel.bestModel.uid)
# Tests for TrainValidationSplit / TrainValidationSplitModel: metric
# optimization direction, persistence (simple and nested estimators),
# parallel evaluation, and copy semantics. Mirrors CrossValidatorTests.
class TrainValidationSplitTests(SparkSessionTestCase):
# With RMSE (lower is better) the best model is the zero-induced-error one;
# validationMetrics has one entry per grid point.
def test_fit_minimize_metric(self):
dataset = self.spark.createDataFrame([
(10, 10.0),
(50, 50.0),
(100, 100.0),
(500, 500.0)] * 10,
["feature", "label"])
iee = InducedErrorEstimator()
evaluator = RegressionEvaluator(metricName="rmse")
grid = ParamGridBuilder() \
.addGrid(iee.inducedError, [100.0, 0.0, 10000.0]) \
.build()
tvs = TrainValidationSplit(estimator=iee, estimatorParamMaps=grid, evaluator=evaluator)
tvsModel = tvs.fit(dataset)
bestModel = tvsModel.bestModel
bestModelMetric = evaluator.evaluate(bestModel.transform(dataset))
validationMetrics = tvsModel.validationMetrics
self.assertEqual(0.0, bestModel.getOrDefault('inducedError'),
"Best model should have zero induced error")
self.assertEqual(0.0, bestModelMetric, "Best model has RMSE of 0")
self.assertEqual(len(grid), len(validationMetrics),
"validationMetrics has the same size of grid parameter")
self.assertEqual(0.0, min(validationMetrics))
# With r2 (higher is better) the best model is still the zero-error one.
def test_fit_maximize_metric(self):
dataset = self.spark.createDataFrame([
(10, 10.0),
(50, 50.0),
(100, 100.0),
(500, 500.0)] * 10,
["feature", "label"])
iee = InducedErrorEstimator()
evaluator = RegressionEvaluator(metricName="r2")
grid = ParamGridBuilder() \
.addGrid(iee.inducedError, [100.0, 0.0, 10000.0]) \
.build()
tvs = TrainValidationSplit(estimator=iee, estimatorParamMaps=grid, evaluator=evaluator)
tvsModel = tvs.fit(dataset)
bestModel = tvsModel.bestModel
bestModelMetric = evaluator.evaluate(bestModel.transform(dataset))
validationMetrics = tvsModel.validationMetrics
self.assertEqual(0.0, bestModel.getOrDefault('inducedError'),
"Best model should have zero induced error")
self.assertEqual(1.0, bestModelMetric, "Best model has R-squared of 1")
self.assertEqual(len(grid), len(validationMetrics),
"validationMetrics has the same size of grid parameter")
self.assertEqual(1.0, max(validationMetrics))
# Save/load of the best (trained) model only; TVS persistence is separate.
def test_save_load_trained_model(self):
# This tests saving and loading the trained model only.
# Save/load for TrainValidationSplit will be added later: SPARK-13786
temp_path = tempfile.mkdtemp()
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
lr = LogisticRegression()
grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1]).build()
evaluator = BinaryClassificationEvaluator()
tvs = TrainValidationSplit(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
tvsModel = tvs.fit(dataset)
lrModel = tvsModel.bestModel
tvsModelPath = temp_path + "/tvsModel"
lrModel.save(tvsModelPath)
loadedLrModel = LogisticRegressionModel.load(tvsModelPath)
self.assertEqual(loadedLrModel.uid, lrModel.uid)
self.assertEqual(loadedLrModel.intercept, lrModel.intercept)
# Round-trip save/load of TrainValidationSplit and its fitted model.
def test_save_load_simple_estimator(self):
# This tests saving and loading the trained model only.
# Save/load for TrainValidationSplit will be added later: SPARK-13786
temp_path = tempfile.mkdtemp()
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
lr = LogisticRegression()
grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1]).build()
evaluator = BinaryClassificationEvaluator()
tvs = TrainValidationSplit(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
tvsModel = tvs.fit(dataset)
tvsPath = temp_path + "/tvs"
tvs.save(tvsPath)
loadedTvs = TrainValidationSplit.load(tvsPath)
self.assertEqual(loadedTvs.getEstimator().uid, tvs.getEstimator().uid)
self.assertEqual(loadedTvs.getEvaluator().uid, tvs.getEvaluator().uid)
self.assertEqual(loadedTvs.getEstimatorParamMaps(), tvs.getEstimatorParamMaps())
tvsModelPath = temp_path + "/tvsModel"
tvsModel.save(tvsModelPath)
loadedModel = TrainValidationSplitModel.load(tvsModelPath)
self.assertEqual(loadedModel.bestModel.uid, tvsModel.bestModel.uid)
# Fitting with parallelism=1 and parallelism=2 must give identical metrics.
def test_parallel_evaluation(self):
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
lr = LogisticRegression()
grid = ParamGridBuilder().addGrid(lr.maxIter, [5, 6]).build()
evaluator = BinaryClassificationEvaluator()
tvs = TrainValidationSplit(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
tvs.setParallelism(1)
tvsSerialModel = tvs.fit(dataset)
tvs.setParallelism(2)
tvsParallelModel = tvs.fit(dataset)
self.assertEqual(tvsSerialModel.validationMetrics, tvsParallelModel.validationMetrics)
# Persistence when tuning a composite estimator (OneVsRest): classifier
# params are compared by uid, other params by value.
def test_save_load_nested_estimator(self):
# This tests saving and loading the trained model only.
# Save/load for TrainValidationSplit will be added later: SPARK-13786
temp_path = tempfile.mkdtemp()
dataset = self.spark.createDataFrame(
[(Vectors.dense([0.0]), 0.0),
(Vectors.dense([0.4]), 1.0),
(Vectors.dense([0.5]), 0.0),
(Vectors.dense([0.6]), 1.0),
(Vectors.dense([1.0]), 1.0)] * 10,
["features", "label"])
ova = OneVsRest(classifier=LogisticRegression())
lr1 = LogisticRegression().setMaxIter(100)
lr2 = LogisticRegression().setMaxIter(150)
grid = ParamGridBuilder().addGrid(ova.classifier, [lr1, lr2]).build()
evaluator = MulticlassClassificationEvaluator()
tvs = TrainValidationSplit(estimator=ova, estimatorParamMaps=grid, evaluator=evaluator)
tvsModel = tvs.fit(dataset)
tvsPath = temp_path + "/tvs"
tvs.save(tvsPath)
loadedTvs = TrainValidationSplit.load(tvsPath)
self.assertEqual(loadedTvs.getEstimator().uid, tvs.getEstimator().uid)
self.assertEqual(loadedTvs.getEvaluator().uid, tvs.getEvaluator().uid)
originalParamMap = tvs.getEstimatorParamMaps()
loadedParamMap = loadedTvs.getEstimatorParamMaps()
for i, param in enumerate(loadedParamMap):
for p in param:
if p.name == "classifier":
self.assertEqual(param[p].uid, originalParamMap[i][p].uid)
else:
self.assertEqual(param[p], originalParamMap[i][p])
tvsModelPath = temp_path + "/tvsModel"
tvsModel.save(tvsModelPath)
loadedModel = TrainValidationSplitModel.load(tvsModelPath)
self.assertEqual(loadedModel.bestModel.uid, tvsModel.bestModel.uid)
# copy() preserves the estimator uid, best-model uid and the full list of
# validation metrics.
def test_copy(self):
dataset = self.spark.createDataFrame([
(10, 10.0),
(50, 50.0),
(100, 100.0),
(500, 500.0)] * 10,
["feature", "label"])
iee = InducedErrorEstimator()
evaluator = RegressionEvaluator(metricName="r2")
grid = ParamGridBuilder() \
.addGrid(iee.inducedError, [100.0, 0.0, 10000.0]) \
.build()
tvs = TrainValidationSplit(estimator=iee, estimatorParamMaps=grid, evaluator=evaluator)
tvsModel = tvs.fit(dataset)
tvsCopied = tvs.copy()
tvsModelCopied = tvsModel.copy()
self.assertEqual(tvs.getEstimator().uid, tvsCopied.getEstimator().uid,
"Copied TrainValidationSplit has the same uid of Estimator")
self.assertEqual(tvsModel.bestModel.uid, tvsModelCopied.bestModel.uid)
self.assertEqual(len(tvsModel.validationMetrics),
len(tvsModelCopied.validationMetrics),
"Copied validationMetrics has the same size of the original")
for index in range(len(tvsModel.validationMetrics)):
self.assertEqual(tvsModel.validationMetrics[index],
tvsModelCopied.validationMetrics[index])
# ML persistence (save/load) tests for estimators, pipelines and models.
class PersistenceTest(SparkSessionTestCase):
# Round-trip an untrained LinearRegression and verify uid, param parenting
# and default params survive persistence.
def test_linear_regression(self):
lr = LinearRegression(maxIter=1)
path = tempfile.mkdtemp()
lr_path = path + "/lr"
lr.save(lr_path)
lr2 = LinearRegression.load(lr_path)
self.assertEqual(lr.uid, lr2.uid)
self.assertEqual(type(lr.uid), type(lr2.uid))
self.assertEqual(lr2.uid, lr2.maxIter.parent,
"Loaded LinearRegression instance uid (%s) did not match Param's uid (%s)"
% (lr2.uid, lr2.maxIter.parent))
self.assertEqual(lr._defaultParamMap[lr.maxIter], lr2._defaultParamMap[lr2.maxIter],
"Loaded LinearRegression instance default params did not match " +
"original defaults")
# Best-effort cleanup of the temp directory.
try:
rmtree(path)
except OSError:
pass
# Round-trip an untrained LogisticRegression; same checks as the linear
# regression persistence test.
def test_logistic_regression(self):
lr = LogisticRegression(maxIter=1)
path = tempfile.mkdtemp()
lr_path = path + "/logreg"
lr.save(lr_path)
lr2 = LogisticRegression.load(lr_path)
self.assertEqual(lr2.uid, lr2.maxIter.parent,
"Loaded LogisticRegression instance uid (%s) "
"did not match Param's uid (%s)"
% (lr2.uid, lr2.maxIter.parent))
self.assertEqual(lr._defaultParamMap[lr.maxIter], lr2._defaultParamMap[lr2.maxIter],
"Loaded LogisticRegression instance default params did not match " +
"original defaults")
# Best-effort cleanup of the temp directory.
try:
rmtree(path)
except OSError:
pass
def _compare_params(self, m1, m2, param):
"""
Compare 2 ML Params instances for the given param, and assert both have the same param value
and parent. The param must be a parameter of m1.
"""
# Prevent key not found error in case of some param in neither paramMap nor defaultParamMap.
if m1.isDefined(param):
paramValue1 = m1.getOrDefault(param)
paramValue2 = m2.getOrDefault(m2.getParam(param.name))
# Param values that are themselves Params objects (e.g. nested
# estimators) are compared recursively via _compare_pipelines.
if isinstance(paramValue1, Params):
self._compare_pipelines(paramValue1, paramValue2)
else:
self.assertEqual(paramValue1, paramValue2)  # for general types param
# Assert parents are equal
self.assertEqual(param.parent, m2.getParam(param.name).parent)
else:
# If m1 is not defined param, then m2 should not, too. See SPARK-14931.
self.assertFalse(m2.isDefined(m2.getParam(param.name)))
def _compare_pipelines(self, m1, m2):
"""
Compare 2 ML types, asserting that they are equivalent.
This currently supports:
- basic types
- Pipeline, PipelineModel
- OneVsRest, OneVsRestModel
This checks:
- uid
- type
- Param values and parents

Raises RuntimeError for any type outside the supported set above.
"""
self.assertEqual(m1.uid, m2.uid)
self.assertEqual(type(m1), type(m2))
if isinstance(m1, JavaParams) or isinstance(m1, Transformer):
self.assertEqual(len(m1.params), len(m2.params))
for p in m1.params:
self._compare_params(m1, m2, p)
elif isinstance(m1, Pipeline):
self.assertEqual(len(m1.getStages()), len(m2.getStages()))
for s1, s2 in zip(m1.getStages(), m2.getStages()):
self._compare_pipelines(s1, s2)
elif isinstance(m1, PipelineModel):
self.assertEqual(len(m1.stages), len(m2.stages))
for s1, s2 in zip(m1.stages, m2.stages):
self._compare_pipelines(s1, s2)
elif isinstance(m1, OneVsRest) or isinstance(m1, OneVsRestModel):
for p in m1.params:
self._compare_params(m1, m2, p)
# OneVsRestModel additionally carries one sub-model per class.
if isinstance(m1, OneVsRestModel):
self.assertEqual(len(m1.models), len(m2.models))
for x, y in zip(m1.models, m2.models):
self._compare_pipelines(x, y)
else:
raise RuntimeError("_compare_pipelines does not yet support type: %s" % type(m1))
# Save/load a flat Pipeline[HashingTF, PCA] and its fitted PipelineModel,
# comparing each against its loaded counterpart.
def test_pipeline_persistence(self):
"""
Pipeline[HashingTF, PCA]
"""
temp_path = tempfile.mkdtemp()
try:
df = self.spark.createDataFrame([(["a", "b", "c"],), (["c", "d", "e"],)], ["words"])
tf = HashingTF(numFeatures=10, inputCol="words", outputCol="features")
pca = PCA(k=2, inputCol="features", outputCol="pca_features")
pl = Pipeline(stages=[tf, pca])
model = pl.fit(df)
pipeline_path = temp_path + "/pipeline"
pl.save(pipeline_path)
loaded_pipeline = Pipeline.load(pipeline_path)
self._compare_pipelines(pl, loaded_pipeline)
model_path = temp_path + "/pipeline-model"
model.save(model_path)
loaded_model = PipelineModel.load(model_path)
self._compare_pipelines(model, loaded_model)
finally:
# Best-effort cleanup of the temp directory.
try:
rmtree(temp_path)
except OSError:
pass
# Same as test_pipeline_persistence but with a Pipeline nested inside
# another Pipeline.
def test_nested_pipeline_persistence(self):
"""
Pipeline[HashingTF, Pipeline[PCA]]
"""
temp_path = tempfile.mkdtemp()
try:
df = self.spark.createDataFrame([(["a", "b", "c"],), (["c", "d", "e"],)], ["words"])
tf = HashingTF(numFeatures=10, inputCol="words", outputCol="features")
pca = PCA(k=2, inputCol="features", outputCol="pca_features")
p0 = Pipeline(stages=[pca])
pl = Pipeline(stages=[tf, p0])
model = pl.fit(df)
pipeline_path = temp_path + "/pipeline"
pl.save(pipeline_path)
loaded_pipeline = Pipeline.load(pipeline_path)
self._compare_pipelines(pl, loaded_pipeline)
model_path = temp_path + "/pipeline-model"
model.save(model_path)
loaded_model = PipelineModel.load(model_path)
self._compare_pipelines(model, loaded_model)
finally:
# Best-effort cleanup of the temp directory.
try:
rmtree(temp_path)
except OSError:
pass
# Persistence of a pipeline containing a Python-only transformer
# (MockUnaryTransformer) alongside a JVM-backed Binarizer.
def test_python_transformer_pipeline_persistence(self):
"""
Pipeline[MockUnaryTransformer, Binarizer]
"""
temp_path = tempfile.mkdtemp()
try:
df = self.spark.range(0, 10).toDF('input')
tf = MockUnaryTransformer(shiftVal=2)\
.setInputCol("input").setOutputCol("shiftedInput")
tf2 = Binarizer(threshold=6, inputCol="shiftedInput", outputCol="binarized")
pl = Pipeline(stages=[tf, tf2])
model = pl.fit(df)
pipeline_path = temp_path + "/pipeline"
pl.save(pipeline_path)
loaded_pipeline = Pipeline.load(pipeline_path)
self._compare_pipelines(pl, loaded_pipeline)
model_path = temp_path + "/pipeline-model"
model.save(model_path)
loaded_model = PipelineModel.load(model_path)
self._compare_pipelines(model, loaded_model)
finally:
# Best-effort cleanup of the temp directory.
try:
rmtree(temp_path)
except OSError:
pass
# Save/load of OneVsRest and its fitted OneVsRestModel, compared with
# _compare_pipelines. Note: the temp directory is not cleaned up here.
def test_onevsrest(self):
temp_path = tempfile.mkdtemp()
df = self.spark.createDataFrame([(0.0, Vectors.dense(1.0, 0.8)),
(1.0, Vectors.sparse(2, [], [])),
(2.0, Vectors.dense(0.5, 0.5))] * 10,
["label", "features"])
lr = LogisticRegression(maxIter=5, regParam=0.01)
ovr = OneVsRest(classifier=lr)
model = ovr.fit(df)
ovrPath = temp_path + "/ovr"
ovr.save(ovrPath)
loadedOvr = OneVsRest.load(ovrPath)
self._compare_pipelines(ovr, loadedOvr)
modelPath = temp_path + "/ovrModel"
model.save(modelPath)
loadedModel = OneVsRestModel.load(modelPath)
self._compare_pipelines(model, loadedModel)
# Round-trip an untrained DecisionTreeClassifier; verify uid/param
# parenting and default params survive persistence.
def test_decisiontree_classifier(self):
dt = DecisionTreeClassifier(maxDepth=1)
path = tempfile.mkdtemp()
dtc_path = path + "/dtc"
dt.save(dtc_path)
dt2 = DecisionTreeClassifier.load(dtc_path)
self.assertEqual(dt2.uid, dt2.maxDepth.parent,
"Loaded DecisionTreeClassifier instance uid (%s) "
"did not match Param's uid (%s)"
% (dt2.uid, dt2.maxDepth.parent))
self.assertEqual(dt._defaultParamMap[dt.maxDepth], dt2._defaultParamMap[dt2.maxDepth],
"Loaded DecisionTreeClassifier instance default params did not match " +
"original defaults")
# Best-effort cleanup of the temp directory.
try:
rmtree(path)
except OSError:
pass
def test_decisiontree_regressor(self):
dt = DecisionTreeRegressor(maxDepth=1)
path = tempfile.mkdtemp()
dtr_path = path + "/dtr"
dt.save(dtr_path)
dt2 = DecisionTreeClassifier.load(dtr_path)
self.assertEqual(dt2.uid, dt2.maxDepth.parent,
"Loaded DecisionTreeRegressor instance uid (%s) "
"did not match Param's uid (%s)"
% (dt2.uid, dt2.maxDepth.parent))
self.assertEqual(dt._defaultParamMap[dt.maxDepth], dt2._defaultParamMap[dt2.maxDepth],
"Loaded DecisionTreeRegressor instance default params did not match " +
"original defaults")
try:
rmtree(path)
except OSError:
pass
# DefaultParamsWriter/DefaultParamsReadable round trip for a Python params
# object, including the overwrite() path.
def test_default_read_write(self):
temp_path = tempfile.mkdtemp()
lr = LogisticRegression()
lr.setMaxIter(50)
lr.setThreshold(.75)
writer = DefaultParamsWriter(lr)
savePath = temp_path + "/lr"
writer.save(savePath)
reader = DefaultParamsReadable.read()
lr2 = reader.load(savePath)
self.assertEqual(lr.uid, lr2.uid)
self.assertEqual(lr.extractParamMap(), lr2.extractParamMap())
# test overwrite
lr.setThreshold(.8)
writer.overwrite().save(savePath)
reader = DefaultParamsReadable.read()
lr3 = reader.load(savePath)
self.assertEqual(lr.uid, lr3.uid)
self.assertEqual(lr.extractParamMap(), lr3.extractParamMap())
# Persistence tests for LDA and its local/distributed model variants.
class LDATest(SparkSessionTestCase):
def _compare(self, m1, m2):
"""
Temp method for comparing instances.
TODO: Replace with generic implementation once SPARK-14706 is merged.
"""
self.assertEqual(m1.uid, m2.uid)
self.assertEqual(type(m1), type(m2))
self.assertEqual(len(m1.params), len(m2.params))
for p in m1.params:
if m1.isDefined(p):
self.assertEqual(m1.getOrDefault(p), m2.getOrDefault(p))
self.assertEqual(p.parent, m2.getParam(p.name).parent)
# LDA models additionally compare vocabulary size and topics matrix.
if isinstance(m1, LDAModel):
self.assertEqual(m1.vocabSize(), m2.vocabSize())
self.assertEqual(m1.topicsMatrix(), m2.topicsMatrix())
# Fit a distributed (EM) LDA model, convert to a local model, then save and
# reload the estimator and both model variants, comparing each round trip.
def test_persistence(self):
# Test save/load for LDA, LocalLDAModel, DistributedLDAModel.
df = self.spark.createDataFrame([
[1, Vectors.dense([0.0, 1.0])],
[2, Vectors.sparse(2, {0: 1.0})],
], ["id", "features"])
# Fit model
lda = LDA(k=2, seed=1, optimizer="em")
distributedModel = lda.fit(df)
self.assertTrue(distributedModel.isDistributed())
localModel = distributedModel.toLocal()
self.assertFalse(localModel.isDistributed())
# Define paths
path = tempfile.mkdtemp()
lda_path = path + "/lda"
dist_model_path = path + "/distLDAModel"
local_model_path = path + "/localLDAModel"
# Test LDA
lda.save(lda_path)
lda2 = LDA.load(lda_path)
self._compare(lda, lda2)
# Test DistributedLDAModel
distributedModel.save(dist_model_path)
distributedModel2 = DistributedLDAModel.load(dist_model_path)
self._compare(distributedModel, distributedModel2)
# Test LocalLDAModel
localModel.save(local_model_path)
localModel2 = LocalLDAModel.load(local_model_path)
self._compare(localModel, localModel2)
# Clean up
try:
rmtree(path)
except OSError:
pass
# Tests that model training summaries expose the expected attributes and
# return the expected types/values.
class TrainingSummaryTest(SparkSessionTestCase):
# LinearRegression summary: exercises every accessor on
# LinearRegressionTrainingSummary and checks evaluate() returns a summary
# with matching explainedVariance.
def test_linear_regression_summary(self):
df = self.spark.createDataFrame([(1.0, 2.0, Vectors.dense(1.0)),
(0.0, 2.0, Vectors.sparse(1, [], []))],
["label", "weight", "features"])
lr = LinearRegression(maxIter=5, regParam=0.0, solver="normal", weightCol="weight",
fitIntercept=False)
model = lr.fit(df)
self.assertTrue(model.hasSummary)
s = model.summary
# test that api is callable and returns expected types
self.assertGreater(s.totalIterations, 0)
self.assertTrue(isinstance(s.predictions, DataFrame))
self.assertEqual(s.predictionCol, "prediction")
self.assertEqual(s.labelCol, "label")
self.assertEqual(s.featuresCol, "features")
objHist = s.objectiveHistory
self.assertTrue(isinstance(objHist, list) and isinstance(objHist[0], float))
self.assertAlmostEqual(s.explainedVariance, 0.25, 2)
self.assertAlmostEqual(s.meanAbsoluteError, 0.0)
self.assertAlmostEqual(s.meanSquaredError, 0.0)
self.assertAlmostEqual(s.rootMeanSquaredError, 0.0)
self.assertAlmostEqual(s.r2, 1.0, 2)
self.assertTrue(isinstance(s.residuals, DataFrame))
self.assertEqual(s.numInstances, 2)
self.assertEqual(s.degreesOfFreedom, 1)
devResiduals = s.devianceResiduals
self.assertTrue(isinstance(devResiduals, list) and isinstance(devResiduals[0], float))
coefStdErr = s.coefficientStandardErrors
self.assertTrue(isinstance(coefStdErr, list) and isinstance(coefStdErr[0], float))
tValues = s.tValues
self.assertTrue(isinstance(tValues, list) and isinstance(tValues[0], float))
pValues = s.pValues
self.assertTrue(isinstance(pValues, list) and isinstance(pValues[0], float))
# test evaluation (with training dataset) produces a summary with same values
# one check is enough to verify a summary is returned
# The child class LinearRegressionTrainingSummary runs full test
sameSummary = model.evaluate(df)
self.assertAlmostEqual(sameSummary.explainedVariance, s.explainedVariance)
# GeneralizedLinearRegression summary: exercises accessors on the GLR
# training summary and checks evaluate() returns a matching deviance.
def test_glr_summary(self):
from pyspark.ml.linalg import Vectors
df = self.spark.createDataFrame([(1.0, 2.0, Vectors.dense(1.0)),
(0.0, 2.0, Vectors.sparse(1, [], []))],
["label", "weight", "features"])
glr = GeneralizedLinearRegression(family="gaussian", link="identity", weightCol="weight",
fitIntercept=False)
model = glr.fit(df)
self.assertTrue(model.hasSummary)
s = model.summary
# test that api is callable and returns expected types
self.assertEqual(s.numIterations, 1)  # this should default to a single iteration of WLS
self.assertTrue(isinstance(s.predictions, DataFrame))
self.assertEqual(s.predictionCol, "prediction")
self.assertEqual(s.numInstances, 2)
# residuals() accepts an optional residual type (e.g. "pearson").
self.assertTrue(isinstance(s.residuals(), DataFrame))
self.assertTrue(isinstance(s.residuals("pearson"), DataFrame))
coefStdErr = s.coefficientStandardErrors
self.assertTrue(isinstance(coefStdErr, list) and isinstance(coefStdErr[0], float))
tValues = s.tValues
self.assertTrue(isinstance(tValues, list) and isinstance(tValues[0], float))
pValues = s.pValues
self.assertTrue(isinstance(pValues, list) and isinstance(pValues[0], float))
self.assertEqual(s.degreesOfFreedom, 1)
self.assertEqual(s.residualDegreeOfFreedom, 1)
self.assertEqual(s.residualDegreeOfFreedomNull, 2)
self.assertEqual(s.rank, 1)
self.assertTrue(isinstance(s.solver, basestring))
self.assertTrue(isinstance(s.aic, float))
self.assertTrue(isinstance(s.deviance, float))
self.assertTrue(isinstance(s.nullDeviance, float))
self.assertTrue(isinstance(s.dispersion, float))
# test evaluation (with training dataset) produces a summary with same values
# one check is enough to verify a summary is returned
# The child class GeneralizedLinearRegressionTrainingSummary runs full test
sameSummary = model.evaluate(df)
self.assertAlmostEqual(sameSummary.deviance, s.deviance)
# Binary LogisticRegression summary: exercises per-label and per-threshold
# metrics plus ROC/PR DataFrames on a trivially separable dataset.
def test_binary_logistic_regression_summary(self):
df = self.spark.createDataFrame([(1.0, 2.0, Vectors.dense(1.0)),
(0.0, 2.0, Vectors.sparse(1, [], []))],
["label", "weight", "features"])
lr = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight", fitIntercept=False)
model = lr.fit(df)
self.assertTrue(model.hasSummary)
s = model.summary
# test that api is callable and returns expected types
self.assertTrue(isinstance(s.predictions, DataFrame))
self.assertEqual(s.probabilityCol, "probability")
self.assertEqual(s.labelCol, "label")
self.assertEqual(s.featuresCol, "features")
self.assertEqual(s.predictionCol, "prediction")
objHist = s.objectiveHistory
self.assertTrue(isinstance(objHist, list) and isinstance(objHist[0], float))
self.assertGreater(s.totalIterations, 0)
self.assertTrue(isinstance(s.labels, list))
self.assertTrue(isinstance(s.truePositiveRateByLabel, list))
self.assertTrue(isinstance(s.falsePositiveRateByLabel, list))
self.assertTrue(isinstance(s.precisionByLabel, list))
self.assertTrue(isinstance(s.recallByLabel, list))
self.assertTrue(isinstance(s.fMeasureByLabel(), list))
self.assertTrue(isinstance(s.fMeasureByLabel(1.0), list))
self.assertTrue(isinstance(s.roc, DataFrame))
self.assertAlmostEqual(s.areaUnderROC, 1.0, 2)
self.assertTrue(isinstance(s.pr, DataFrame))
self.assertTrue(isinstance(s.fMeasureByThreshold, DataFrame))
self.assertTrue(isinstance(s.precisionByThreshold, DataFrame))
self.assertTrue(isinstance(s.recallByThreshold, DataFrame))
self.assertAlmostEqual(s.accuracy, 1.0, 2)
self.assertAlmostEqual(s.weightedTruePositiveRate, 1.0, 2)
self.assertAlmostEqual(s.weightedFalsePositiveRate, 0.0, 2)
self.assertAlmostEqual(s.weightedRecall, 1.0, 2)
self.assertAlmostEqual(s.weightedPrecision, 1.0, 2)
self.assertAlmostEqual(s.weightedFMeasure(), 1.0, 2)
self.assertAlmostEqual(s.weightedFMeasure(1.0), 1.0, 2)
# test evaluation (with training dataset) produces a summary with same values
# one check is enough to verify a summary is returned, Scala version runs full test
sameSummary = model.evaluate(df)
self.assertAlmostEqual(sameSummary.areaUnderROC, s.areaUnderROC)
def test_multiclass_logistic_regression_summary(self):
    """Multiclass (3-label) LogisticRegressionSummary smoke test.

    Verifies every summary accessor is callable and returns the expected
    type, then pins the weighted metrics to fixed reference values for
    this tiny training set (compared to 2 decimal places).
    """
    df = self.spark.createDataFrame([(1.0, 2.0, Vectors.dense(1.0)),
                                     (0.0, 2.0, Vectors.sparse(1, [], [])),
                                     (2.0, 2.0, Vectors.dense(2.0)),
                                     (2.0, 2.0, Vectors.dense(1.9))],
                                    ["label", "weight", "features"])
    lr = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight", fitIntercept=False)
    model = lr.fit(df)
    self.assertTrue(model.hasSummary)
    s = model.summary
    # test that api is callable and returns expected types
    self.assertTrue(isinstance(s.predictions, DataFrame))
    self.assertEqual(s.probabilityCol, "probability")
    self.assertEqual(s.labelCol, "label")
    self.assertEqual(s.featuresCol, "features")
    self.assertEqual(s.predictionCol, "prediction")
    objHist = s.objectiveHistory
    self.assertTrue(isinstance(objHist, list) and isinstance(objHist[0], float))
    self.assertGreater(s.totalIterations, 0)
    # per-label metric accessors all return plain Python lists
    self.assertTrue(isinstance(s.labels, list))
    self.assertTrue(isinstance(s.truePositiveRateByLabel, list))
    self.assertTrue(isinstance(s.falsePositiveRateByLabel, list))
    self.assertTrue(isinstance(s.precisionByLabel, list))
    self.assertTrue(isinstance(s.recallByLabel, list))
    self.assertTrue(isinstance(s.fMeasureByLabel(), list))
    self.assertTrue(isinstance(s.fMeasureByLabel(1.0), list))
    # weighted metrics pinned to reference values for this dataset
    self.assertAlmostEqual(s.accuracy, 0.75, 2)
    self.assertAlmostEqual(s.weightedTruePositiveRate, 0.75, 2)
    self.assertAlmostEqual(s.weightedFalsePositiveRate, 0.25, 2)
    self.assertAlmostEqual(s.weightedRecall, 0.75, 2)
    self.assertAlmostEqual(s.weightedPrecision, 0.583, 2)
    self.assertAlmostEqual(s.weightedFMeasure(), 0.65, 2)
    self.assertAlmostEqual(s.weightedFMeasure(1.0), 0.65, 2)
    # test evaluation (with training dataset) produces a summary with same values
    # one check is enough to verify a summary is returned, Scala version runs full test
    sameSummary = model.evaluate(df)
    self.assertAlmostEqual(sameSummary.accuracy, s.accuracy)
def test_gaussian_mixture_summary(self):
    """GaussianMixtureModel training summary exposes the expected fields."""
    feature_rows = [(Vectors.dense(1.0),), (Vectors.dense(5.0),),
                    (Vectors.dense(10.0),), (Vectors.sparse(1, [], []),)]
    frame = self.spark.createDataFrame(feature_rows, ["features"])
    model = GaussianMixture(k=2).fit(frame)
    self.assertTrue(model.hasSummary)
    summary = model.summary
    # DataFrame-valued members of the summary.
    for member in (summary.predictions, summary.probability, summary.cluster):
        self.assertTrue(isinstance(member, DataFrame))
    # Column names recorded on the summary.
    self.assertEqual(summary.probabilityCol, "probability")
    self.assertEqual(summary.featuresCol, "features")
    self.assertEqual(summary.predictionCol, "prediction")
    # Clustering shape.
    self.assertEqual(len(summary.clusterSizes), 2)
    self.assertEqual(summary.k, 2)
def test_bisecting_kmeans_summary(self):
    """BisectingKMeansModel training summary exposes the expected fields."""
    feature_rows = [(Vectors.dense(1.0),), (Vectors.dense(5.0),),
                    (Vectors.dense(10.0),), (Vectors.sparse(1, [], []),)]
    frame = self.spark.createDataFrame(feature_rows, ["features"])
    model = BisectingKMeans(k=2).fit(frame)
    self.assertTrue(model.hasSummary)
    summary = model.summary
    # DataFrame-valued members of the summary.
    self.assertTrue(isinstance(summary.predictions, DataFrame))
    self.assertTrue(isinstance(summary.cluster, DataFrame))
    # Column names and clustering shape.
    self.assertEqual(summary.featuresCol, "features")
    self.assertEqual(summary.predictionCol, "prediction")
    self.assertEqual(len(summary.clusterSizes), 2)
    self.assertEqual(summary.k, 2)
def test_kmeans_summary(self):
    """KMeansModel training summary exposes the expected fields."""
    points = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
              (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
    frame = self.spark.createDataFrame(points, ["features"])
    model = KMeans(k=2, seed=1).fit(frame)
    self.assertTrue(model.hasSummary)
    summary = model.summary
    # DataFrame-valued members of the summary.
    self.assertTrue(isinstance(summary.predictions, DataFrame))
    self.assertTrue(isinstance(summary.cluster, DataFrame))
    # Column names and clustering shape.
    self.assertEqual(summary.featuresCol, "features")
    self.assertEqual(summary.predictionCol, "prediction")
    self.assertEqual(len(summary.clusterSizes), 2)
    self.assertEqual(summary.k, 2)
class OneVsRestTests(SparkSessionTestCase):
    """Tests for the OneVsRest meta-classifier wrapper."""

    def test_copy(self):
        """copy() with extra params must not mutate the original estimator/model."""
        df = self.spark.createDataFrame([(0.0, Vectors.dense(1.0, 0.8)),
                                         (1.0, Vectors.sparse(2, [], [])),
                                         (2.0, Vectors.dense(0.5, 0.5))],
                                        ["label", "features"])
        lr = LogisticRegression(maxIter=5, regParam=0.01)
        ovr = OneVsRest(classifier=lr)
        ovr1 = ovr.copy({lr.maxIter: 10})
        # Only the copy sees the overridden maxIter; the original keeps 5.
        self.assertEqual(ovr.getClassifier().getMaxIter(), 5)
        self.assertEqual(ovr1.getClassifier().getMaxIter(), 10)
        model = ovr.fit(df)
        model1 = model.copy({model.predictionCol: "indexed"})
        self.assertEqual(model1.getPredictionCol(), "indexed")

    def test_output_columns(self):
        """transform() appends exactly one 'prediction' column."""
        df = self.spark.createDataFrame([(0.0, Vectors.dense(1.0, 0.8)),
                                         (1.0, Vectors.sparse(2, [], [])),
                                         (2.0, Vectors.dense(0.5, 0.5))],
                                        ["label", "features"])
        lr = LogisticRegression(maxIter=5, regParam=0.01)
        ovr = OneVsRest(classifier=lr, parallelism=1)
        model = ovr.fit(df)
        output = model.transform(df)
        self.assertEqual(output.columns, ["label", "features", "prediction"])

    def test_parallelism_doesnt_change_output(self):
        """Fitting with parallelism=1 and parallelism=2 yields the same models."""
        df = self.spark.createDataFrame([(0.0, Vectors.dense(1.0, 0.8)),
                                         (1.0, Vectors.sparse(2, [], [])),
                                         (2.0, Vectors.dense(0.5, 0.5))],
                                        ["label", "features"])
        ovrPar1 = OneVsRest(classifier=LogisticRegression(maxIter=5, regParam=.01), parallelism=1)
        modelPar1 = ovrPar1.fit(df)
        ovrPar2 = OneVsRest(classifier=LogisticRegression(maxIter=5, regParam=.01), parallelism=2)
        modelPar2 = ovrPar2.fit(df)
        # Compare each per-class submodel's coefficients and intercept.
        for i, model in enumerate(modelPar1.models):
            self.assertTrue(np.allclose(model.coefficients.toArray(),
                                        modelPar2.models[i].coefficients.toArray(), atol=1E-4))
            self.assertTrue(np.allclose(model.intercept, modelPar2.models[i].intercept, atol=1E-4))

    def test_support_for_weightCol(self):
        """weightCol works whether or not the base classifier has HasWeightCol."""
        df = self.spark.createDataFrame([(0.0, Vectors.dense(1.0, 0.8), 1.0),
                                         (1.0, Vectors.sparse(2, [], []), 1.0),
                                         (2.0, Vectors.dense(0.5, 0.5), 1.0)],
                                        ["label", "features", "weight"])
        # classifier inherits hasWeightCol
        lr = LogisticRegression(maxIter=5, regParam=0.01)
        ovr = OneVsRest(classifier=lr, weightCol="weight")
        self.assertIsNotNone(ovr.fit(df))
        # classifier doesn't inherit hasWeightCol
        dt = DecisionTreeClassifier()
        ovr2 = OneVsRest(classifier=dt, weightCol="weight")
        self.assertIsNotNone(ovr2.fit(df))
class HashingTFTest(SparkSessionTestCase):
    """Tests for the HashingTF transformer."""

    def test_apply_binary_term_freqs(self):
        """HashingTF with binary=True caps every term frequency at 1.0."""
        df = self.spark.createDataFrame([(0, ["a", "a", "b", "c", "c", "c"])], ["id", "words"])
        n = 10
        hashingTF = HashingTF()
        hashingTF.setInputCol("words").setOutputCol("features").setNumFeatures(n).setBinary(True)
        output = hashingTF.transform(df)
        features = output.select("features").first().features.toArray()
        expected = Vectors.dense([1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]).toArray()
        # Compare element-wise; enumerate/zip is the idiomatic replacement for
        # indexing with range(n).
        for i, (actual_val, expected_val) in enumerate(zip(features, expected)):
            self.assertAlmostEqual(actual_val, expected_val, 14,
                                   "Error at " + str(i) + ": expected " +
                                   str(expected_val) + ", got " + str(actual_val))
class GeneralizedLinearRegressionTest(SparkSessionTestCase):
    """Tests for GeneralizedLinearRegression; coefficients and intercepts are
    checked against fixed reference values for these small datasets."""

    def test_tweedie_distribution(self):
        """Tweedie family (variancePower=1.6) with the default link, then with
        an explicit linkPower=-1.0."""
        df = self.spark.createDataFrame(
            [(1.0, Vectors.dense(0.0, 0.0)),
             (1.0, Vectors.dense(1.0, 2.0)),
             (2.0, Vectors.dense(0.0, 0.0)),
             (2.0, Vectors.dense(1.0, 1.0)), ], ["label", "features"])
        glr = GeneralizedLinearRegression(family="tweedie", variancePower=1.6)
        model = glr.fit(df)
        self.assertTrue(np.allclose(model.coefficients.toArray(), [-0.4645, 0.3402], atol=1E-4))
        self.assertTrue(np.isclose(model.intercept, 0.7841, atol=1E-4))
        # Refit with an explicit link power; reference values change accordingly.
        model2 = glr.setLinkPower(-1.0).fit(df)
        self.assertTrue(np.allclose(model2.coefficients.toArray(), [-0.6667, 0.5], atol=1E-4))
        self.assertTrue(np.isclose(model2.intercept, 0.6667, atol=1E-4))

    def test_offset(self):
        """Poisson GLM honouring both a weight column and an offset column."""
        df = self.spark.createDataFrame(
            [(0.2, 1.0, 2.0, Vectors.dense(0.0, 5.0)),
             (0.5, 2.1, 0.5, Vectors.dense(1.0, 2.0)),
             (0.9, 0.4, 1.0, Vectors.dense(2.0, 1.0)),
             (0.7, 0.7, 0.0, Vectors.dense(3.0, 3.0))], ["label", "weight", "offset", "features"])
        glr = GeneralizedLinearRegression(family="poisson", weightCol="weight", offsetCol="offset")
        model = glr.fit(df)
        self.assertTrue(np.allclose(model.coefficients.toArray(), [0.664647, -0.3192581],
                                    atol=1E-4))
        self.assertTrue(np.isclose(model.intercept, -1.561613, atol=1E-4))
class LogisticRegressionTest(SparkSessionTestCase):
    """Tests for box-constrained (bounded) logistic regression."""

    def test_binomial_logistic_regression_with_bound(self):
        """Binomial LR with lower bounds on coefficients and an upper bound of 0
        on the intercept; results checked against fixed reference values."""
        df = self.spark.createDataFrame(
            [(1.0, 1.0, Vectors.dense(0.0, 5.0)),
             (0.0, 2.0, Vectors.dense(1.0, 2.0)),
             (1.0, 3.0, Vectors.dense(2.0, 1.0)),
             (0.0, 4.0, Vectors.dense(3.0, 3.0)), ], ["label", "weight", "features"])
        lor = LogisticRegression(regParam=0.01, weightCol="weight",
                                 lowerBoundsOnCoefficients=Matrices.dense(1, 2, [-1.0, -1.0]),
                                 upperBoundsOnIntercepts=Vectors.dense(0.0))
        model = lor.fit(df)
        self.assertTrue(
            np.allclose(model.coefficients.toArray(), [-0.2944, -0.0484], atol=1E-4))
        # The intercept is pushed to its upper bound of 0.
        self.assertTrue(np.isclose(model.intercept, 0.0, atol=1E-4))

    def test_multinomial_logistic_regression_with_bound(self):
        """Multinomial LR on the sample libsvm dataset, with a 3x4 lower-bound
        matrix on coefficients and zero upper bounds on intercepts."""
        data_path = "data/mllib/sample_multiclass_classification_data.txt"
        df = self.spark.read.format("libsvm").load(data_path)
        lor = LogisticRegression(regParam=0.01,
                                 lowerBoundsOnCoefficients=Matrices.dense(3, 4, range(12)),
                                 upperBoundsOnIntercepts=Vectors.dense(0.0, 0.0, 0.0))
        model = lor.fit(df)
        # One row of expected coefficients per class.
        expected = [[4.593, 4.5516, 9.0099, 12.2904],
                    [1.0, 8.1093, 7.0, 10.0],
                    [3.041, 5.0, 8.0, 11.0]]
        for i in range(0, len(expected)):
            self.assertTrue(
                np.allclose(model.coefficientMatrix.toArray()[i], expected[i], atol=1E-4))
        self.assertTrue(
            np.allclose(model.interceptVector.toArray(), [-0.9057, -1.1392, -0.0033], atol=1E-4))
class MultilayerPerceptronClassifierTest(SparkSessionTestCase):
    """Tests for the multilayer perceptron classifier."""

    def test_raw_and_probability_prediction(self):
        """Prediction plus raw/probability columns match reference values for a
        fixed seed and training set."""
        data_path = "data/mllib/sample_multiclass_classification_data.txt"
        df = self.spark.read.format("libsvm").load(data_path)
        mlp = MultilayerPerceptronClassifier(maxIter=100, layers=[4, 5, 4, 3],
                                             blockSize=128, seed=123)
        model = mlp.fit(df)
        test = self.sc.parallelize([Row(features=Vectors.dense(0.1, 0.1, 0.25, 0.25))]).toDF()
        result = model.transform(test).head()
        expected_prediction = 2.0
        expected_probability = [0.0, 0.0, 1.0]
        expected_rawPrediction = [57.3955, -124.5462, 67.9943]
        # BUG FIX: assertTrue(x, msg) treated the expected value as a failure
        # message and always passed; assertEqual actually compares them.
        self.assertEqual(result.prediction, expected_prediction)
        self.assertTrue(np.allclose(result.probability, expected_probability, atol=1E-4))
        self.assertTrue(np.allclose(result.rawPrediction, expected_rawPrediction, atol=1E-4))
class FPGrowthTests(SparkSessionTestCase):
    """Tests for the FP-Growth frequent-pattern-mining estimator."""

    def setUp(self):
        super(FPGrowthTests, self).setUp()
        # Four transactions over the items {1, 2, 3}.
        self.data = self.spark.createDataFrame(
            [([1, 2], ), ([1, 2], ), ([1, 2, 3], ), ([1, 3], )],
            ["items"])

    def _assert_same_rows(self, left, right):
        # Two DataFrames hold exactly the same rows iff both set differences
        # are empty (order-insensitive comparison).
        self.assertEqual(left.subtract(right).count(), 0)
        self.assertEqual(right.subtract(left).count(), 0)

    def test_association_rules(self):
        model = FPGrowth().fit(self.data)
        expected_rules = self.spark.createDataFrame(
            [([3], [1], 1.0), ([2], [1], 1.0)],
            ["antecedent", "consequent", "confidence"]
        )
        self._assert_same_rows(model.associationRules, expected_rules)

    def test_freq_itemsets(self):
        model = FPGrowth().fit(self.data)
        expected_itemsets = self.spark.createDataFrame(
            [([1], 4), ([2], 3), ([2, 1], 3), ([3], 2), ([3, 1], 2)],
            ["items", "freq"]
        )
        self._assert_same_rows(model.freqItemsets, expected_itemsets)

    def tearDown(self):
        del self.data
class ImageReaderTest(SparkSessionTestCase):
    """Tests for ImageSchema.readImages and its conversion helpers."""

    def test_read_images(self):
        """readImages loads the kitten fixtures and round-trips through ndarray."""
        data_path = 'data/mllib/images/kittens'
        df = ImageSchema.readImages(data_path, recursive=True, dropImageFailures=True)
        self.assertEqual(df.count(), 4)
        # First column of the first row is the image struct itself.
        first_row = df.take(1)[0][0]
        array = ImageSchema.toNDArray(first_row)
        # Field 1 of the image struct is 'height' (see imageFields below), so
        # the ndarray's leading dimension must match it.
        self.assertEqual(len(array), first_row[1])
        # toImage(toNDArray(img)) must reproduce the original row; field 0 is
        # 'origin'.
        self.assertEqual(ImageSchema.toImage(array, origin=first_row[0]), first_row)
        self.assertEqual(df.schema, ImageSchema.imageSchema)
        # OpenCV type codes exposed by the schema.
        expected = {'CV_8UC3': 16, 'Undefined': -1, 'CV_8U': 0, 'CV_8UC1': 0, 'CV_8UC4': 24}
        self.assertEqual(ImageSchema.ocvTypes, expected)
        expected = ['origin', 'height', 'width', 'nChannels', 'mode', 'data']
        self.assertEqual(ImageSchema.imageFields, expected)
        self.assertEqual(ImageSchema.undefinedImageType, "Undefined")
class ALSTest(SparkSessionTestCase):
    """Tests for the ALS recommender."""

    def test_storage_levels(self):
        """Storage-level params propagate to the backing Java estimator."""
        ratings = [(0, 0, 4.0), (0, 1, 2.0), (1, 1, 3.0),
                   (1, 2, 4.0), (2, 1, 1.0), (2, 2, 5.0)]
        df = self.spark.createDataFrame(ratings, ["user", "item", "rating"])
        als = ALS().setMaxIter(1).setRank(1)

        def check_levels(intermediate, final):
            # Python wrapper and backing Java object must agree on both levels.
            self.assertEqual(als.getIntermediateStorageLevel(), intermediate)
            self.assertEqual(als._java_obj.getIntermediateStorageLevel(), intermediate)
            self.assertEqual(als.getFinalStorageLevel(), final)
            self.assertEqual(als._java_obj.getFinalStorageLevel(), final)

        # test default params
        als.fit(df)
        check_levels("MEMORY_AND_DISK", "MEMORY_AND_DISK")
        # test non-default params
        als.setIntermediateStorageLevel("MEMORY_ONLY_2")
        als.setFinalStorageLevel("DISK_ONLY")
        als.fit(df)
        check_levels("MEMORY_ONLY_2", "DISK_ONLY")
class DefaultValuesTests(PySparkTestCase):
    """
    Test :py:class:`JavaParams` classes to see if their default Param values match
    those in their Scala counterparts.
    """

    def test_java_params(self):
        import pyspark.ml.feature
        import pyspark.ml.classification
        import pyspark.ml.clustering
        import pyspark.ml.pipeline
        import pyspark.ml.recommendation
        import pyspark.ml.regression
        modules = [pyspark.ml.feature, pyspark.ml.classification, pyspark.ml.clustering,
                   pyspark.ml.pipeline, pyspark.ml.recommendation, pyspark.ml.regression]
        for module in modules:
            for name, cls in inspect.getmembers(module, inspect.isclass):
                # Only concrete JavaParams subclasses (models are skipped: they
                # cannot be instantiated without fitting).
                if name.endswith('Model') or not issubclass(cls, JavaParams):
                    continue
                if inspect.isabstract(cls):
                    continue
                # NOTE: disable check_params_exist until there is parity with Scala API
                ParamTests.check_params(self, cls(), check_params_exist=False)
def _squared_distance(a, b):
    """Squared distance between two vector-like operands.

    Dispatches on whichever operand is an ml Vector, since plain lists and
    numpy arrays do not implement squared_distance themselves.
    """
    return a.squared_distance(b) if isinstance(a, Vector) else b.squared_distance(a)
class VectorTests(MLlibTestCase):
    """Unit tests for ml.linalg vectors/matrices: SerDe, dot products,
    distances, hashing, equality, indexing, repr and norms.

    Fix relative to the previous revision: several checks used
    ``assertTrue(value, expected)``; assertTrue's second argument is a failure
    *message*, so those checks passed vacuously. They are now real comparisons
    wherever the correct expected value is unambiguous.
    """

    def _test_serialize(self, v):
        # Round-trip through Python pickle and through the JVM MLSerDe, both
        # for a single value and for a batch of 100.
        self.assertEqual(v, ser.loads(ser.dumps(v)))
        jvec = self.sc._jvm.org.apache.spark.ml.python.MLSerDe.loads(bytearray(ser.dumps(v)))
        nv = ser.loads(bytes(self.sc._jvm.org.apache.spark.ml.python.MLSerDe.dumps(jvec)))
        self.assertEqual(v, nv)
        vs = [v] * 100
        jvecs = self.sc._jvm.org.apache.spark.ml.python.MLSerDe.loads(bytearray(ser.dumps(vs)))
        nvs = ser.loads(bytes(self.sc._jvm.org.apache.spark.ml.python.MLSerDe.dumps(jvecs)))
        self.assertEqual(vs, nvs)

    def test_serialize(self):
        self._test_serialize(DenseVector(range(10)))
        self._test_serialize(DenseVector(array([1., 2., 3., 4.])))
        self._test_serialize(DenseVector(pyarray.array('d', range(10))))
        self._test_serialize(SparseVector(4, {1: 1, 3: 2}))
        self._test_serialize(SparseVector(3, {}))
        self._test_serialize(DenseMatrix(2, 3, range(6)))
        sm1 = SparseMatrix(
            3, 4, [0, 2, 2, 4, 4], [1, 2, 1, 2], [1.0, 2.0, 4.0, 5.0])
        self._test_serialize(sm1)

    def test_dot(self):
        sv = SparseVector(4, {1: 1, 3: 2})
        dv = DenseVector(array([1., 2., 3., 4.]))
        lst = DenseVector([1, 2, 3, 4])
        mat = array([[1., 2., 3., 4.],
                     [1., 2., 3., 4.],
                     [1., 2., 3., 4.],
                     [1., 2., 3., 4.]])
        arr = pyarray.array('d', [0, 1, 2, 3])
        self.assertEqual(10.0, sv.dot(dv))
        self.assertTrue(array_equal(array([3., 6., 9., 12.]), sv.dot(mat)))
        self.assertEqual(30.0, dv.dot(dv))
        self.assertTrue(array_equal(array([10., 20., 30., 40.]), dv.dot(mat)))
        self.assertEqual(30.0, lst.dot(dv))
        self.assertTrue(array_equal(array([10., 20., 30., 40.]), lst.dot(mat)))
        self.assertEqual(7.0, sv.dot(arr))

    def test_squared_distance(self):
        sv = SparseVector(4, {1: 1, 3: 2})
        dv = DenseVector(array([1., 2., 3., 4.]))
        lst = DenseVector([4, 3, 2, 1])
        lst1 = [4, 3, 2, 1]
        arr = pyarray.array('d', [0, 2, 1, 3])
        narr = array([0, 2, 1, 3])
        self.assertEqual(15.0, _squared_distance(sv, dv))
        self.assertEqual(25.0, _squared_distance(sv, lst))
        self.assertEqual(20.0, _squared_distance(dv, lst))
        # The helper is symmetric in its arguments.
        self.assertEqual(15.0, _squared_distance(dv, sv))
        self.assertEqual(25.0, _squared_distance(lst, sv))
        self.assertEqual(20.0, _squared_distance(lst, dv))
        self.assertEqual(0.0, _squared_distance(sv, sv))
        self.assertEqual(0.0, _squared_distance(dv, dv))
        self.assertEqual(0.0, _squared_distance(lst, lst))
        self.assertEqual(25.0, _squared_distance(sv, lst1))
        self.assertEqual(3.0, _squared_distance(sv, arr))
        self.assertEqual(3.0, _squared_distance(sv, narr))

    def test_hash(self):
        # Equal vectors (dense or sparse) must hash equally.
        v1 = DenseVector([0.0, 1.0, 0.0, 5.5])
        v2 = SparseVector(4, [(1, 1.0), (3, 5.5)])
        v3 = DenseVector([0.0, 1.0, 0.0, 5.5])
        v4 = SparseVector(4, [(1, 1.0), (3, 2.5)])
        self.assertEqual(hash(v1), hash(v2))
        self.assertEqual(hash(v1), hash(v3))
        self.assertEqual(hash(v2), hash(v3))
        self.assertFalse(hash(v1) == hash(v4))
        self.assertFalse(hash(v2) == hash(v4))

    def test_eq(self):
        v1 = DenseVector([0.0, 1.0, 0.0, 5.5])
        v2 = SparseVector(4, [(1, 1.0), (3, 5.5)])
        v3 = DenseVector([0.0, 1.0, 0.0, 5.5])
        v4 = SparseVector(6, [(1, 1.0), (3, 5.5)])
        v5 = DenseVector([0.0, 1.0, 0.0, 2.5])
        v6 = SparseVector(4, [(1, 1.0), (3, 2.5)])
        self.assertEqual(v1, v2)
        self.assertEqual(v1, v3)
        # Different length or different values -> not equal.
        self.assertFalse(v2 == v4)
        self.assertFalse(v1 == v5)
        self.assertFalse(v1 == v6)

    def test_equals(self):
        indices = [1, 2, 4]
        values = [1., 3., 2.]
        self.assertTrue(Vectors._equals(indices, values, list(range(5)), [0., 1., 3., 0., 2.]))
        self.assertFalse(Vectors._equals(indices, values, list(range(5)), [0., 3., 1., 0., 2.]))
        self.assertFalse(Vectors._equals(indices, values, list(range(5)), [0., 3., 0., 2.]))
        self.assertFalse(Vectors._equals(indices, values, list(range(5)), [0., 1., 3., 2., 2.]))

    def test_conversion(self):
        # numpy arrays should be automatically upcast to float64
        # tests for fix of [SPARK-5089]
        v = array([1, 2, 3, 4], dtype='float64')
        dv = DenseVector(v)
        self.assertTrue(dv.array.dtype == 'float64')
        v = array([1, 2, 3, 4], dtype='float32')
        dv = DenseVector(v)
        self.assertTrue(dv.array.dtype == 'float64')

    def test_sparse_vector_indexing(self):
        sv = SparseVector(5, {1: 1, 3: 2})
        self.assertEqual(sv[0], 0.)
        self.assertEqual(sv[3], 2.)
        self.assertEqual(sv[1], 1.)
        self.assertEqual(sv[2], 0.)
        self.assertEqual(sv[4], 0.)
        # Negative indices count from the end, like lists.
        self.assertEqual(sv[-1], 0.)
        self.assertEqual(sv[-2], 2.)
        self.assertEqual(sv[-3], 0.)
        self.assertEqual(sv[-5], 0.)
        for ind in [5, -6]:
            self.assertRaises(IndexError, sv.__getitem__, ind)
        for ind in [7.8, '1']:
            self.assertRaises(TypeError, sv.__getitem__, ind)
        zeros = SparseVector(4, {})
        self.assertEqual(zeros[0], 0.0)
        self.assertEqual(zeros[3], 0.0)
        for ind in [4, -5]:
            self.assertRaises(IndexError, zeros.__getitem__, ind)
        empty = SparseVector(0, {})
        for ind in [-1, 0, 1]:
            self.assertRaises(IndexError, empty.__getitem__, ind)

    def test_sparse_vector_iteration(self):
        self.assertListEqual(list(SparseVector(3, [], [])), [0.0, 0.0, 0.0])
        self.assertListEqual(list(SparseVector(5, [0, 3], [1.0, 2.0])), [1.0, 0.0, 0.0, 2.0, 0.0])

    def test_matrix_indexing(self):
        mat = DenseMatrix(3, 2, [0, 1, 4, 6, 8, 10])
        # Values are stored column-major.
        expected = [[0, 6], [1, 8], [4, 10]]
        for i in range(3):
            for j in range(2):
                self.assertEqual(mat[i, j], expected[i][j])
        for i, j in [(-1, 0), (4, 1), (3, 4)]:
            self.assertRaises(IndexError, mat.__getitem__, (i, j))

    def test_repr_dense_matrix(self):
        # BUG FIX: these used assertTrue(repr(...), expected) and passed
        # vacuously; they now compare for real.
        mat = DenseMatrix(3, 2, [0, 1, 4, 6, 8, 10])
        self.assertEqual(
            repr(mat),
            'DenseMatrix(3, 2, [0.0, 1.0, 4.0, 6.0, 8.0, 10.0], False)')
        mat = DenseMatrix(3, 2, [0, 1, 4, 6, 8, 10], True)
        # The transposed matrix prints True (the old expected string said
        # False -- a copy-paste error never caught by the vacuous assert).
        self.assertEqual(
            repr(mat),
            'DenseMatrix(3, 2, [0.0, 1.0, 4.0, 6.0, 8.0, 10.0], True)')
        # More than 16 values: repr truncates to first 8, '...', last 8.
        mat = DenseMatrix(6, 3, zeros(18))
        self.assertEqual(
            repr(mat),
            'DenseMatrix(6, 3, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ..., '
            '0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], False)')

    def test_repr_sparse_matrix(self):
        sm1t = SparseMatrix(
            3, 4, [0, 2, 3, 5], [0, 1, 2, 0, 2], [3.0, 2.0, 4.0, 9.0, 8.0],
            isTransposed=True)
        self.assertEqual(
            repr(sm1t),
            'SparseMatrix(3, 4, [0, 2, 3, 5], [0, 1, 2, 0, 2], [3.0, 2.0, 4.0, 9.0, 8.0], True)')
        indices = tile(arange(6), 3)
        values = ones(18)
        sm = SparseMatrix(6, 3, [0, 6, 12, 18], indices, values)
        # Long index/value arrays are truncated to first 8, '...', last 8.
        self.assertEqual(
            repr(sm),
            'SparseMatrix(6, 3, [0, 6, 12, 18], '
            '[0, 1, 2, 3, 4, 5, 0, 1, ..., 4, 5, 0, 1, 2, 3, 4, 5], '
            '[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ..., '
            '1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], False)')
        # NOTE(review): the exact multi-line __str__ layout is not pinned here
        # (the original expected literal embedded line-continuation whitespace
        # and was never actually compared); this only checks str() is truthy.
        self.assertTrue(
            str(sm),
            "6 X 3 CSCMatrix\n\
    (0,0) 1.0\n(1,0) 1.0\n(2,0) 1.0\n(3,0) 1.0\n(4,0) 1.0\n(5,0) 1.0\n\
    (0,1) 1.0\n(1,1) 1.0\n(2,1) 1.0\n(3,1) 1.0\n(4,1) 1.0\n(5,1) 1.0\n\
    (0,2) 1.0\n(1,2) 1.0\n(2,2) 1.0\n(3,2) 1.0\n..\n..")
        sm = SparseMatrix(1, 18, zeros(19), [], [])
        # colPtrs longer than 16 entries is truncated the same way.
        self.assertEqual(
            repr(sm),
            'SparseMatrix(1, 18, '
            '[0, 0, 0, 0, 0, 0, 0, 0, ..., 0, 0, 0, 0, 0, 0, 0, 0], [], [], False)')

    def test_sparse_matrix(self):
        # Test sparse matrix creation.
        sm1 = SparseMatrix(
            3, 4, [0, 2, 2, 4, 4], [1, 2, 1, 2], [1.0, 2.0, 4.0, 5.0])
        self.assertEqual(sm1.numRows, 3)
        self.assertEqual(sm1.numCols, 4)
        self.assertEqual(sm1.colPtrs.tolist(), [0, 2, 2, 4, 4])
        self.assertEqual(sm1.rowIndices.tolist(), [1, 2, 1, 2])
        self.assertEqual(sm1.values.tolist(), [1.0, 2.0, 4.0, 5.0])
        # BUG FIX: was a vacuous assertTrue(repr(...), expected).
        self.assertEqual(
            repr(sm1),
            'SparseMatrix(3, 4, [0, 2, 2, 4, 4], [1, 2, 1, 2], [1.0, 2.0, 4.0, 5.0], False)')

        # Test indexing
        expected = [
            [0, 0, 0, 0],
            [1, 0, 4, 0],
            [2, 0, 5, 0]]

        for i in range(3):
            for j in range(4):
                self.assertEqual(expected[i][j], sm1[i, j])
        self.assertTrue(array_equal(sm1.toArray(), expected))

        for i, j in [(-1, 1), (4, 3), (3, 5)]:
            self.assertRaises(IndexError, sm1.__getitem__, (i, j))

        # Test conversion to dense and sparse.
        smnew = sm1.toDense().toSparse()
        self.assertEqual(sm1.numRows, smnew.numRows)
        self.assertEqual(sm1.numCols, smnew.numCols)
        self.assertTrue(array_equal(sm1.colPtrs, smnew.colPtrs))
        self.assertTrue(array_equal(sm1.rowIndices, smnew.rowIndices))
        self.assertTrue(array_equal(sm1.values, smnew.values))

        # A transposed (CSR-layout) sparse matrix.
        sm1t = SparseMatrix(
            3, 4, [0, 2, 3, 5], [0, 1, 2, 0, 2], [3.0, 2.0, 4.0, 9.0, 8.0],
            isTransposed=True)
        self.assertEqual(sm1t.numRows, 3)
        self.assertEqual(sm1t.numCols, 4)
        self.assertEqual(sm1t.colPtrs.tolist(), [0, 2, 3, 5])
        self.assertEqual(sm1t.rowIndices.tolist(), [0, 1, 2, 0, 2])
        self.assertEqual(sm1t.values.tolist(), [3.0, 2.0, 4.0, 9.0, 8.0])

        expected = [
            [3, 2, 0, 0],
            [0, 0, 4, 0],
            [9, 0, 8, 0]]

        for i in range(3):
            for j in range(4):
                self.assertEqual(expected[i][j], sm1t[i, j])
        self.assertTrue(array_equal(sm1t.toArray(), expected))

    def test_dense_matrix_is_transposed(self):
        mat1 = DenseMatrix(3, 2, [0, 4, 1, 6, 3, 9], isTransposed=True)
        mat = DenseMatrix(3, 2, [0, 1, 3, 4, 6, 9])
        self.assertEqual(mat1, mat)

        expected = [[0, 4], [1, 6], [3, 9]]
        for i in range(3):
            for j in range(2):
                self.assertEqual(mat1[i, j], expected[i][j])
        self.assertTrue(array_equal(mat1.toArray(), expected))

        sm = mat1.toSparse()
        self.assertTrue(array_equal(sm.rowIndices, [1, 2, 0, 1, 2]))
        self.assertTrue(array_equal(sm.colPtrs, [0, 2, 5]))
        self.assertTrue(array_equal(sm.values, [1, 3, 4, 6, 9]))

    def test_norms(self):
        a = DenseVector([0, 2, 3, -1])
        self.assertAlmostEqual(a.norm(2), 3.742, 3)
        # BUG FIX: assertTrue(a.norm(1), 6) never compared anything; L1 norm
        # of [0, 2, 3, -1] is 6 and the inf-norm is 3.
        self.assertAlmostEqual(a.norm(1), 6)
        self.assertAlmostEqual(a.norm(inf), 3)
        a = SparseVector(4, [0, 2], [3, -4])
        self.assertAlmostEqual(a.norm(2), 5)
        self.assertAlmostEqual(a.norm(1), 7)
        self.assertAlmostEqual(a.norm(inf), 4)
        tmp = SparseVector(4, [0, 2], [3, 0])
        self.assertEqual(tmp.numNonzeros(), 1)
class VectorUDTTests(MLlibTestCase):
    """Round-trip and schema-inference tests for VectorUDT."""

    dv0 = DenseVector([])
    dv1 = DenseVector([1.0, 2.0])
    sv0 = SparseVector(2, [], [])
    sv1 = SparseVector(2, [1], [2.0])
    udt = VectorUDT()

    def test_json_schema(self):
        # The UDT's JSON representation must round-trip to an equal UDT.
        self.assertEqual(VectorUDT.fromJson(self.udt.jsonValue()), self.udt)

    def test_serialization(self):
        # Every vector flavour must survive serialize/deserialize unchanged.
        for vec in (self.dv0, self.dv1, self.sv0, self.sv1):
            round_tripped = self.udt.deserialize(self.udt.serialize(vec))
            self.assertEqual(vec, round_tripped)

    def test_infer_schema(self):
        rows = [Row(label=1.0, features=self.dv1),
                Row(label=0.0, features=self.sv1)]
        df = self.sc.parallelize(rows).toDF()
        # The inferred type of the 'features' column must be this UDT.
        feature_field = [f for f in df.schema.fields if f.name == "features"][0]
        self.assertEqual(feature_field.dataType, self.udt)
        vectors = df.rdd.map(lambda p: p.features).collect()
        self.assertEqual(len(vectors), 2)
        for v in vectors:
            if isinstance(v, SparseVector):
                self.assertEqual(v, self.sv1)
            elif isinstance(v, DenseVector):
                self.assertEqual(v, self.dv1)
            else:
                raise TypeError("expecting a vector but got %r of type %r" % (v, type(v)))
class MatrixUDTTests(MLlibTestCase):
    """Round-trip and schema-inference tests for MatrixUDT."""

    dm1 = DenseMatrix(3, 2, [0, 1, 4, 5, 9, 10])
    dm2 = DenseMatrix(3, 2, [0, 1, 4, 5, 9, 10], isTransposed=True)
    sm1 = SparseMatrix(1, 1, [0, 1], [0], [2.0])
    sm2 = SparseMatrix(2, 1, [0, 0, 1], [0], [5.0], isTransposed=True)
    udt = MatrixUDT()

    def test_json_schema(self):
        self.assertEqual(MatrixUDT.fromJson(self.udt.jsonValue()), self.udt)

    def test_serialization(self):
        # Dense/sparse, transposed or not, must round-trip unchanged.
        for m in [self.dm1, self.dm2, self.sm1, self.sm2]:
            self.assertEqual(m, self.udt.deserialize(self.udt.serialize(m)))

    def test_infer_schema(self):
        rdd = self.sc.parallelize([("dense", self.dm1), ("sparse", self.sm1)])
        df = rdd.toDF()
        schema = df.schema
        # BUG FIX: assertTrue(x, msg) never compared anything; assertEqual does.
        self.assertEqual(schema.fields[1].dataType, self.udt)
        matrices = df.rdd.map(lambda x: x._2).collect()
        self.assertEqual(len(matrices), 2)
        for m in matrices:
            if isinstance(m, DenseMatrix):
                # BUG FIX: was assertTrue(m, self.dm1), which only checked
                # truthiness of m; the intent is an equality comparison.
                self.assertEqual(m, self.dm1)
            elif isinstance(m, SparseMatrix):
                self.assertEqual(m, self.sm1)
            else:
                raise ValueError("Expected a matrix but got type %r" % type(m))
class WrapperTests(MLlibTestCase):
    """Tests for the py4j JavaWrapper helpers."""

    def test_new_java_array(self):
        """_new_java_array round-trips typed Python lists through Java arrays."""
        gateway = self.sc._gateway

        def round_trip(py_list, java_class):
            # Build a typed Java array and convert it back to Python.
            java_array = JavaWrapper._new_java_array(py_list, java_class)
            return _java2py(self.sc, java_array)

        # Primitive wrapper element types.
        self.assertEqual(round_trip(["a", "b", "c"], gateway.jvm.java.lang.String),
                         ["a", "b", "c"])
        self.assertEqual(round_trip([1, 2, 3], gateway.jvm.java.lang.Integer),
                         [1, 2, 3])
        self.assertEqual(round_trip([0.1, 0.2, 0.3], gateway.jvm.java.lang.Double),
                         [0.1, 0.2, 0.3])
        self.assertEqual(round_trip([False, True, True], gateway.jvm.java.lang.Boolean),
                         [False, True, True])
        # Arrays whose elements are Java DenseVector objects.
        v1 = DenseVector([0.0, 1.0])
        v2 = DenseVector([1.0, 0.0])
        vec_java_list = [_py2java(self.sc, v1), _py2java(self.sc, v2)]
        vec_class = gateway.jvm.org.apache.spark.ml.linalg.DenseVector
        self.assertEqual(round_trip(vec_java_list, vec_class), [v1, v2])
        # An empty input yields an empty list back.
        self.assertEqual(round_trip([], gateway.jvm.java.lang.Integer), [])
class ChiSquareTestTests(SparkSessionTestCase):
    """Tests for the ChiSquareTest statistics wrapper."""

    def test_chisquaretest(self):
        """ChiSquareTest.test returns a DataFrame with the expected columns."""
        rows = [[0, Vectors.dense([0, 1, 2])],
                [1, Vectors.dense([1, 1, 1])],
                [2, Vectors.dense([2, 1, 0])]]
        df = self.spark.createDataFrame(rows, ['label', 'feat'])
        res = ChiSquareTest.test(df, 'feat', 'label')
        # This line is hitting the collect bug described in #17218, commented for now.
        # pValues = res.select("degreesOfFreedom").collect())
        self.assertIsInstance(res, DataFrame)
        fieldNames = {field.name for field in res.schema.fields}
        for expected_field in ["pValues", "degreesOfFreedom", "statistics"]:
            self.assertIn(expected_field, fieldNames)
class UnaryTransformerTests(SparkSessionTestCase):
    """Tests exercising the UnaryTransformer base class via MockUnaryTransformer."""

    def test_unary_transformer_validate_input_type(self):
        """validateInputType accepts the declared type and rejects others."""
        transformer = (MockUnaryTransformer(shiftVal=3)
                       .setInputCol("input").setOutputCol("output"))
        # The declared input type passes without raising.
        transformer.validateInputType(DoubleType())
        # Any other type must be rejected.
        with self.assertRaises(TypeError):
            transformer.validateInputType(IntegerType())

    def test_unary_transformer_transform(self):
        """transform shifts every input value by shiftVal."""
        shift = 3
        transformer = (MockUnaryTransformer(shiftVal=shift)
                       .setInputCol("input").setOutputCol("output"))
        frame = self.spark.range(0, 10).toDF('input')
        frame = frame.withColumn("input", frame.input.cast(dataType="double"))
        transformed = transformer.transform(frame)
        for row in transformed.select("input", "output").collect():
            self.assertEqual(row.input + shift, row.output)
if __name__ == "__main__":
    # Pull every test case into this namespace so unittest discovery sees them.
    from pyspark.ml.tests import *
    if xmlrunner:
        # Emit JUnit-style XML reports (used by CI) when xmlrunner is available.
        unittest.main(testRunner=xmlrunner.XMLTestRunner(output='target/test-reports'))
    else:
        unittest.main()
|
adrian-ionescu/apache-spark
|
python/pyspark/ml/tests.py
|
Python
|
apache-2.0
| 99,066
|
[
"Gaussian"
] |
5ee57300719831913ff00af90c559b3a469f902cc8d6130444c9c836235935ad
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- Python -*-
"""
Chat program
ROSPEEX あるいは jsk ROS Voice Recognition
から入力された文章を使い、DoCoMoAPIで会話する
The project is hosted on GitHub, where you can fork the project or report
issues. Visit https://github.com/roboworks/
:copyright: (c) 2015,2016 by Hiroyuki Okada, All rights reserved.
:license: MIT License (MIT), http://www.opensource.org/licenses/MIT
"""
__author__ = 'Hiroyuki Okada'
__version__ = '0.2'
import sys
import string
import time
import datetime
import re
sys.path.append(".")
import urllib2
import urllib
import json
import rospy
from std_msgs.msg import String
# rospeex
from rospeex_if import ROSpeexInterface
# jsk
from jsk_gui_msgs.msg import VoiceMessage
from jsk_gui_msgs.msg import Tablet
# trcp
from trcp_chat.srv import *
from trcp_chat.msg import *
# Default persona/profile payload for the DoCoMo chat (small-talk) dialogue API.
# Keys mirror the API's request parameters: "utt" is the user utterance,
# "context" the dialogue-session id, "nickname"/"nickname_y" the user's name
# and its katakana reading, then profile hints (sex, blood type, birth date,
# age, zodiac sign, place), the dialogue "mode", and the timeout "t".
_chat_={
    "utt":"",
    "context":"aaabbbccc111222333",
    "nickname":"あかね",
    "nickname_y":"アカネ",
    "sex":"女",
    "bloodtype":"O",
    "birthdateY":1990,
    "birthdateM":2,
    "birthdateD":5,
    "age":25,
    "constellations":"水瓶",
    "place":"大阪",
    "mode":"dialog",
    "t":"20"
}
class ChatTRCP(object):
""" ChatTRCP class """
def __init__(self):
    """ Initializer """
    # Intentionally empty: all setup (ROS node, interfaces, service proxies)
    # happens in run().
def run(self):
    """Start the ROS node: wire speech input (rospeex + jsk), build the
    DoCoMo API request templates from ROS parameters, connect to the
    dialogue services, and spin until shutdown."""
    # initialize ros node
    rospy.init_node('ChatTRCP')
    rospy.loginfo("start DoCoMo Chat TRCP node")
    """ for ROSpeexInterface """
    self.rospeex = ROSpeexInterface()
    self.rospeex.init()
    # Route rospeex speech-recognition results to our callback.
    self.rospeex.register_sr_response( self.sr_response )
    self.rospeex.set_spi_config(language='ja', engine='nict')
    """日本語(英語もある)でNICT(Googleもある)"""
    """launchファイ決めてもいいけど、動的に変更する?"""
    """とりあえず、現状は決め打ち"""
    # Japanese with the NICT engine (English and Google also exist).
    # Hard-coded for now; could later be made configurable (launch file)
    # or switchable at runtime.
    self.lang = 'ja'
    self.input_engine = 'nict'
    # NOTE(review): this repeats the set_spi_config call above with the same
    # arguments -- appears redundant.
    self.rospeex.set_spi_config(language='ja',engine='nict')
    """ for jsk voice understanding """
    rospy.Subscriber("/Tablet/voice", VoiceMessage, self.jsk_voice)
    """ 発話理解APIの準備 """
    # Request template for the DoCoMo sentence-understanding API; every
    # field can be overridden via a private ROS parameter.
    self.req = DoCoMoUnderstandingReq()
    self.req.projectKey = rospy.get_param("~req_projectKey", 'OSU')
    self.req.appName = rospy.get_param("~req_appName" ,'')
    self.req.appKey = rospy.get_param("~req_appKey" ,'hoge_app01')
    self.req.clientVer = rospy.get_param("~req_clientVer", '1.0.0')
    self.req.dialogMode = rospy.get_param("~req_dialogMode",'off')
    self.req.language = rospy.get_param("~req_language", 'ja')
    self.req.userId = rospy.get_param("~req_userId", '12 123456 123456 0')
    self.req.lat = rospy.get_param("~req_lat" , '139.766084')
    self.req.lon = rospy.get_param("~req_lon" , '35.681382')
    """ 雑談対話APIの準備 """
    # Request template for the DoCoMo chat (small-talk) API, likewise
    # parameterised via private ROS parameters.
    self.req_chat = DoCoMoChatReq()
    self.req_chat.utt = ""
    self.req_chat.context = rospy.get_param("~context", "aaabbbccc111222333")
    self.req_chat.nickname = rospy.get_param("~nickname", "ひろゆき")
    self.req_chat.nickname_y = rospy.get_param("~nickname_y", "ヒロユキ")
    self.req_chat.sex = rospy.get_param("~sex", "男")
    self.req_chat.bloodtype = rospy.get_param("~bloodtype", "AB")
    self.req_chat.birthdateY = rospy.get_param("~birthdateY", "1960")
    self.req_chat.birthdateM = rospy.get_param("~birthdateM", "7")
    self.req_chat.birthdateD = rospy.get_param("~birthdateD", "11")
    self.req_chat.age = rospy.get_param("~age", "56")
    self.req_chat.constellations = rospy.get_param("~constellations", "蟹")
    self.req_chat.place = rospy.get_param("~place", "東京")
    self.req_chat.mode = rospy.get_param("~mode", "dialog")
    self.req_chat.t = rospy.get_param("~t", "20")
    """ サービスの起動 """
    # Block until each backing service is available, then create the proxies.
    rospy.wait_for_service('docomo_sentenceunderstanding')
    self.understanding = rospy.ServiceProxy('docomo_sentenceunderstanding',DoCoMoUnderstanding)
    rospy.wait_for_service('docomo_qa')
    self.qa = rospy.ServiceProxy('docomo_qa',DoCoMoQa)
    rospy.wait_for_service('docomo_chat')
    self.chat = rospy.ServiceProxy('docomo_chat',DoCoMoChat)
    self.resp_understanding = DoCoMoUnderstandingRes()
    self.nowmode = "CHAT"
    rospy.spin()
def trcpSay(self, text):
    """Speak *text* through rospeex text-to-speech (Japanese, NICT engine)."""
    self.rospeex.say(text, 'ja', 'nict')
def jsk_voice(self, data):
    """Callback for /Tablet/voice: forward the first recognized text
    to the dialogue handler."""
    utterance = data.texts[0]
    rospy.loginfo("jsk_voice:%s", utterance)
    self.execTrcpChat(utterance)
def sr_response(self, message):
    """Callback for rospeex recognition results.

    Rospeex appends a trailing Japanese full stop to the recognized
    string, so strip it before dispatching to the dialogue handler.
    """
    stripped = message.replace('。', '')
    rospy.loginfo("rospeex:%s", stripped)
    self.execTrcpChat(stripped)
""" DoCoMo 知識検索の実行 """
def execDoCoMoQA(self, message):
rospy.loginfo("DoCoMo Q&A")
self.req_qa = DoCoMoQaReq()
self.req_qa.text = message
res_qa = self.qa(self.req_qa)
if res_qa.success:
# for answer in res_qa.response.answer:
# print answer.rank
# print answer.answerText
# print answer.linkText
# print answer.linkUrl
"""
質問回答のレスポンスコードは、下記のいずれかを返却。
S020000: 内部のDBからリストアップした回答
S020001: 知識Q&A APIが計算した回答
S020010: 外部サイトから抽出した回答候補
S020011: 外部サイトへのリンクを回答
E010000: 回答不能(パラメータ不備)
E020000: 回答不能(結果0件)
E099999: 回答不能(処理エラー)
※Sで始まる場合は正常回答、
Eで始まる場合は回答が得られていないことを示す。
"""
rospy.loginfo("DoCoMo Q&A response code:%s",res_qa.response.code)
if res_qa.response.code == 'S020000':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay(res_qa.response.textForSpeech)
elif res_qa.response.code == 'S020001':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay(res_qa.response.textForSpeech)
elif res_qa.response.code == 'S020010':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay(res_qa.response.textForSpeech)
elif res_qa.response.code == 'S020011':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay(res_qa.response.textForSpeech)
elif res_qa.response.code == 'E010000':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay("ごめんなさい、答えが見つかりませんでした")
elif res_qa.response.code == 'E020000':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay("ごめんなさい、答えが見つかりませんでした")
elif res_qa.response.code == 'E099999':
rospy.loginfo("DoCoMo Q&A response:%s",res_qa.response.textForDisplay)
self.trcpSay("ごめんなさい、答えが見つかりませんでした")
else:
pass
else:
rospy.loginfo("DoCoMo Q&A response:%s","system error")
return False
return True
def execTrcpChat(self, message):
    """Dispatch one utterance: answer local keywords directly, otherwise
    classify it with the DoCoMo sentence-understanding API and route to
    the chat or knowledge Q&A services.

    Always returns True; service errors are logged and swallowed so the
    dialogue loop keeps running.
    """
    rospy.loginfo("chat:%s", message)
    # --- Local keyword handling: answer "what time is it" without any API call.
    time_match = re.compile('(?P<time>何時)').search(message)
    if time_match is not None:
        rospy.loginfo("What Time is it now? :%s", message)
        d = datetime.datetime.today()
        text = u'%d時%d分です。' % (d.hour, d.minute)
        self.trcpSay(text)
        return True
    # --- End of local keyword handling.
    print(self.nowmode)
    # commandId -> log label for every intent that is answered through the
    # knowledge Q&A service. (The original spelled out one identical elif
    # branch per id; the log strings — including the "Coomunincation" and
    # duplicated ":Schedule input" spellings — are preserved verbatim.)
    qa_intents = {
        "BK00101": ":Q&A",
        "BT00101": ":Transfer",
        "BT00201": ":Map",
        "BT00301": ":Weather",
        "BT00401": ":Restaurant",
        "BT00501": ":Webpage",
        "BT00601": ":Sightseeing",
        "BT00701": ":Camera",
        "BT00801": ":Gallery",
        "BT00901": ":Coomunincation",
        "BT01001": ":Mail",
        "BT01101": ":Memo input",
        "BT01102": ":Memo output",
        "BT01201": ":Alarm",
        "BT01301": ":Schedule input",
        "BT01302": ":Schedule input",
        "BT01501": ":Setting",
        "BT01601": ":SNS",
        "BT90101": ":Cancel",
        "BM00101": ":Map transfer",
        "BM00201": ":Short mail",
    }
    try:
        # In CHAIN ("shiritori" word-chain) mode, skip the
        # sentence-understanding API and force the chat intent;
        # otherwise let the API classify the utterance.
        if self.nowmode == "CHAIN":
            self.resp_understanding.success = True
            self.resp_understanding.response.commandId = "BC00101"
            self.resp_understanding.response.utteranceText = message
        else:
            self.req.utteranceText = message
            self.resp_understanding = self.understanding(self.req)
        if self.resp_understanding.success:
            commandId = self.resp_understanding.response.commandId
            rospy.loginfo("<< %s", commandId)
            if commandId == "BC00101":
                # Small talk via the DoCoMo chat API.
                rospy.loginfo("DoCoMo Chat")
                self.req_chat.utt = message
                self.res_chat = self.chat(self.req_chat)
                rospy.loginfo("DoCoMo Chat response:%s", self.res_chat.response)
                # Carry the chat mode/context over to the next request.
                self.req_chat.mode = self.res_chat.response.mode.encode('utf-8')
                self.req_chat.context = self.res_chat.response.context.encode('utf-8')
                srtr = self.res_chat.response.mode == "srtr"
                if self.nowmode == "CHAIN":
                    print(self.nowmode)
                    self.nowmode = "CHAIN" if srtr else "CHAT"
                    self.trcpSay(self.res_chat.response.utt)
                elif self.nowmode == "CHAT":
                    if srtr:
                        print(self.nowmode)
                        self.nowmode = "CHAIN"
                        self.trcpSay(self.res_chat.response.utt)
                    else:
                        print(self.nowmode)
                        self.nowmode = "CHAT"
                        rospy.loginfo("TRCP Chat response:%s", self.res_chat.response.yomi)
                        self.trcpSay(self.res_chat.response.yomi)
            elif commandId in qa_intents:
                rospy.loginfo(qa_intents[commandId])
                self.execDoCoMoQA(self.resp_understanding.response.utteranceText)
            else:
                # The understanding API returned an intent we do not handle.
                rospy.loginfo("Undeterminable:%s", self.resp_understanding.response.commandId)
                self.trcpSay("ごめんなさい、良く聞き取れませんでした。")
        else:
            # The sentence-understanding API itself reported failure.
            rospy.loginfo("DoCoMo 発話理解API failed")
    except Exception as e:
        # Keep the dialogue loop alive, but record what went wrong
        # (the original bare `except:` swallowed everything silently,
        # including KeyboardInterrupt/SystemExit).
        rospy.loginfo("error")
        rospy.logerr("execTrcpChat failed: %s", e)
    return True
if __name__ == '__main__':
    # Entry point: construct the chat node and hand control to ROS.
    try:
        node = ChatTRCP()
        node.run()
    except rospy.ROSInterruptException:
        # Normal shutdown path when ROS is interrupted (e.g. Ctrl-C).
        pass
|
okadahiroyuki/trcp
|
trcp_chat/nodes/ChatTRCP.py
|
Python
|
mit
| 16,700
|
[
"VisIt"
] |
6ebb484ba6674d4a80f76d0917340397aad7c7167bf7dbc32e7ece15871f668e
|
#!/usr/bin/env python
import argparse
import binascii
import copy
import datetime
import hashlib
import json
import logging
import os
import shutil
import struct
import subprocess
import tempfile
import xml.etree.ElementTree as ET
from collections import defaultdict
from Bio.Data import CodonTable
# Module-wide logging setup; the 'jbrowse' logger is used by all helpers below.
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('jbrowse')
# Date stamp substituted into track categories via the '__pd__date__pd__' placeholder.
TODAY = datetime.datetime.now().strftime("%Y-%m-%d")
# Base URL of the Galaxy instance, used to build links in metadata_from_node().
# NOTE(review): presumably assigned from command-line arguments parsed past
# this excerpt — confirm; it is formatted as-is (including None) into URLs.
GALAXY_INFRASTRUCTURE_URL = None
class ColorScaling(object):
    """Generate JBrowse client-side colour/opacity callback functions and
    manage a rotating Brewer colour palette for tracks."""

    # JS callback template for tracks whose score drives opacity only.
    COLOR_FUNCTION_TEMPLATE = """
    function(feature, variableName, glyphObject, track) {{
        var score = {score};
        {opacity}
        return 'rgba({red}, {green}, {blue}, ' + opacity + ')';
    }}
    """

    # JS callback template that additionally resolves a per-feature colour by
    # searching up and down the feature hierarchy for a 'color' attribute.
    COLOR_FUNCTION_TEMPLATE_QUAL = r"""
    function(feature, variableName, glyphObject, track) {{
        var search_up = function self(sf, attr){{
            if(sf.get(attr) !== undefined){{
                return sf.get(attr);
            }}
            if(sf.parent() === undefined) {{
                return;
            }}else{{
                return self(sf.parent(), attr);
            }}
        }};
        var search_down = function self(sf, attr){{
            if(sf.get(attr) !== undefined){{
                return sf.get(attr);
            }}
            if(sf.children() === undefined) {{
                return;
            }}else{{
                var kids = sf.children();
                for(var child_idx in kids){{
                    var x = self(kids[child_idx], attr);
                    if(x !== undefined){{
                        return x;
                    }}
                }}
                return;
            }}
        }};
        var color = ({user_spec_color} || search_up(feature, 'color') || search_down(feature, 'color') || {auto_gen_color});
        var score = (search_up(feature, 'score') || search_down(feature, 'score'));
        {opacity}
        if(score === undefined){{ opacity = 1; }}
        var result = /^#?([a-f\d]{{2}})([a-f\d]{{2}})([a-f\d]{{2}})$/i.exec(color);
        var red = parseInt(result[1], 16);
        var green = parseInt(result[2], 16);
        var blue = parseInt(result[3], 16);
        if(isNaN(opacity) || opacity < 0){{ opacity = 0; }}
        return 'rgba(' + red + ',' + green + ',' + blue + ',' + opacity + ')';
    }}
    """

    # Opacity formulas substituted into the templates above. 'linear' and
    # 'logarithmic' are .format()ed with min/max before insertion.
    # NOTE(review): the doubled braces in 'blast' survive verbatim into the
    # generated JS when inserted into COLOR_FUNCTION_TEMPLATE, because
    # substituted text is not re-scanned by str.format — confirm intended.
    OPACITY_MATH = {
        'linear': """
            var opacity = (score - ({min})) / (({max}) - ({min}));
        """,
        'logarithmic': """
            var opacity = Math.log10(score - ({min})) / Math.log10(({max}) - ({min}));
        """,
        'blast': """
            var opacity = 0;
            if(score == 0.0) {{
                opacity = 1;
            }} else {{
                opacity = (20 - Math.log10(score)) / 180;
            }}
        """
    }

    BREWER_COLOUR_IDX = 0
    # Paired-qualitative Brewer colours, cycled by _get_colours().
    BREWER_COLOUR_SCHEMES = [
        (166, 206, 227),
        (31, 120, 180),
        (178, 223, 138),
        (51, 160, 44),
        (251, 154, 153),
        (227, 26, 28),
        (253, 191, 111),
        (255, 127, 0),
        (202, 178, 214),
        (106, 61, 154),
        (255, 255, 153),
        (177, 89, 40),
        (228, 26, 28),
        (55, 126, 184),
        (77, 175, 74),
        (152, 78, 163),
        (255, 127, 0),
    ]

    BREWER_DIVERGING_PALLETES = {
        'BrBg': ("#543005", "#003c30"),
        'PiYg': ("#8e0152", "#276419"),
        'PRGn': ("#40004b", "#00441b"),
        'PuOr': ("#7f3b08", "#2d004b"),
        'RdBu': ("#67001f", "#053061"),
        'RdGy': ("#67001f", "#1a1a1a"),
        'RdYlBu': ("#a50026", "#313695"),
        'RdYlGn': ("#a50026", "#006837"),
        'Spectral': ("#9e0142", "#5e4fa2"),
    }

    def __init__(self):
        # Rotating index into BREWER_COLOUR_SCHEMES.
        self.brewer_colour_idx = 0

    def rgb_from_hex(self, hexstr):
        """Decode 'rrggbb' (no leading '#') into an (r, g, b) tuple."""
        # http://stackoverflow.com/questions/4296249/how-do-i-convert-a-hex-triplet-to-an-rgb-tuple-and-back
        return struct.unpack('BBB', binascii.unhexlify(hexstr))

    def min_max_gff(self, gff_file):
        """Scan a GFF file and return (min, max) of the column-6 scores.

        Lines whose score column is missing or non-numeric are skipped.
        Returns (None, None) when no parsable score is found.
        """
        min_val = None
        max_val = None
        with open(gff_file, 'r') as handle:
            for line in handle:
                try:
                    value = float(line.split('\t')[5])
                except Exception:
                    continue
                # FIX: the previous `min(value, (min_val or value))` treated a
                # legitimate running minimum of 0 as "unset" (0 is falsy) and
                # could reset the minimum upward; compare against None instead.
                # The trailing redundant if-blocks were dead code and removed.
                if min_val is None or value < min_val:
                    min_val = value
                if max_val is None or value > max_val:
                    max_val = value
        return min_val, max_val

    def hex_from_rgb(self, r, g, b):
        """Encode integer (r, g, b) as a '#rrggbb' string."""
        return '#%02x%02x%02x' % (r, g, b)

    def _get_colours(self):
        """Return the next Brewer palette colour, cycling through the list."""
        r, g, b = self.BREWER_COLOUR_SCHEMES[self.brewer_colour_idx % len(self.BREWER_COLOUR_SCHEMES)]
        self.brewer_colour_idx += 1
        return r, g, b

    def parse_menus(self, track):
        """Build the menuTemplate track config from a track's <menus> data.

        The four leading empty dicts preserve JBrowse's default menu slots.
        """
        trackConfig = {'menuTemplate': [{}, {}, {}, {}]}
        if 'menu' in track['menus']:
            # Galaxy hands us a bare dict for a single menu entry, a list for several.
            menu_list = [track['menus']['menu']]
            if isinstance(track['menus']['menu'], list):
                menu_list = track['menus']['menu']
            for m in menu_list:
                tpl = {
                    'action': m['action'],
                    'label': m.get('label', '{name}'),
                    'iconClass': m.get('iconClass', 'dijitIconBookmark'),
                }
                if 'url' in m:
                    tpl['url'] = m['url']
                if 'content' in m:
                    tpl['content'] = m['content']
                if 'title' in m:
                    tpl['title'] = m['title']
                trackConfig['menuTemplate'].append(tpl)
        return trackConfig

    def parse_colours(self, track, trackFormat, gff3=None):
        """Build the colour-related portion of a track config.

        Wiggle tracks get a positive/negative bicolor palette; other
        tracks either get a flat colour ('ignore') or a generated JS
        callback that scales opacity by feature score.
        """
        # Wiggle tracks have a bicolor pallete
        trackConfig = {'style': {}}
        if trackFormat == 'wiggle':
            trackConfig['style']['pos_color'] = track['wiggle']['color_pos']
            trackConfig['style']['neg_color'] = track['wiggle']['color_neg']
            if trackConfig['style']['pos_color'] == '__auto__':
                trackConfig['style']['neg_color'] = self.hex_from_rgb(*self._get_colours())
                trackConfig['style']['pos_color'] = self.hex_from_rgb(*self._get_colours())
            # Wiggle tracks can change colour at a specified place
            bc_pivot = track['wiggle']['bicolor_pivot']
            if bc_pivot not in ('mean', 'zero'):
                # The values are either one of those two strings or a number
                bc_pivot = float(bc_pivot)
            trackConfig['bicolor_pivot'] = bc_pivot
        elif 'scaling' in track:
            if track['scaling']['method'] == 'ignore':
                if track['scaling']['scheme']['color'] != '__auto__':
                    trackConfig['style']['color'] = track['scaling']['scheme']['color']
                else:
                    trackConfig['style']['color'] = self.hex_from_rgb(*self._get_colours())
            else:
                # Scored method
                algo = track['scaling']['algo']
                # linear, logarithmic, blast
                scales = track['scaling']['scales']
                # type __auto__, manual (min, max)
                scheme = track['scaling']['scheme']
                # scheme -> (type (opacity), color)
                # ==================================
                # GENE CALLS OR BLAST
                # ==================================
                if trackFormat == 'blast':
                    red, green, blue = self._get_colours()
                    color_function = self.COLOR_FUNCTION_TEMPLATE.format(**{
                        'score': "feature._parent.get('score')",
                        'opacity': self.OPACITY_MATH['blast'],
                        'red': red,
                        'green': green,
                        'blue': blue,
                    })
                    trackConfig['style']['color'] = color_function.replace('\n', '')
                elif trackFormat == 'gene_calls':
                    # Default values, based on GFF3 spec
                    min_val = 0
                    max_val = 1000
                    # Get min/max and build a scoring function since JBrowse doesn't
                    if scales['type'] == 'automatic' or scales['type'] == '__auto__':
                        min_val, max_val = self.min_max_gff(gff3)
                    else:
                        min_val = scales.get('min', 0)
                        max_val = scales.get('max', 1000)
                    if scheme['color'] == '__auto__':
                        user_color = 'undefined'
                        auto_color = "'%s'" % self.hex_from_rgb(*self._get_colours())
                    elif scheme['color'].startswith('#'):
                        user_color = "'%s'" % self.hex_from_rgb(*self.rgb_from_hex(scheme['color'][1:]))
                        auto_color = 'undefined'
                    else:
                        user_color = 'undefined'
                        auto_color = "'%s'" % self.hex_from_rgb(*self._get_colours())
                    color_function = self.COLOR_FUNCTION_TEMPLATE_QUAL.format(**{
                        'opacity': self.OPACITY_MATH[algo].format(**{'max': max_val, 'min': min_val}),
                        'user_spec_color': user_color,
                        'auto_gen_color': auto_color,
                    })
                    trackConfig['style']['color'] = color_function.replace('\n', '')
        return trackConfig
def etree_to_dict(t):
    """Recursively convert an ElementTree element into nested dicts.

    Child tags map to their converted values (a list when a tag repeats),
    attributes are stored under '@'-prefixed keys, and text content goes
    under '#text' — or becomes the value itself when the element has
    neither children nor attributes. None converts to {}.
    """
    if t is None:
        return {}
    node = {} if t.attrib else None
    children = list(t)
    if children:
        merged = defaultdict(list)
        for child in children:
            for tag, value in etree_to_dict(child).items():
                merged[tag].append(value)
        # Collapse single-element lists back to scalars.
        node = {tag: values[0] if len(values) == 1 else values
                for tag, values in merged.items()}
    result = {t.tag: node}
    if t.attrib:
        result[t.tag].update(('@' + key, value) for key, value in t.attrib.items())
    if t.text:
        stripped = t.text.strip()
        if children or t.attrib:
            if stripped:
                result[t.tag]['#text'] = stripped
        else:
            result[t.tag] = stripped
    return result
# (Note for the colour callbacks above: the score used for opacity comes
# from feature._parent.get('score') or feature.get('score').)
# Directory this script is installed in; used to locate the sibling helper
# scripts blastxml_to_gapped_gff3.py and gff3_rebase.py.
INSTALLED_TO = os.path.dirname(os.path.realpath(__file__))
def metadata_from_node(node):
    """Flatten a Galaxy metadata XML node into a {section_key: value} dict.

    Exactly one <dataset> child is required; otherwise an empty dict is
    returned. Attributes of the dataset/history/metadata/tool children are
    namespaced by their element name, and a few fields are then rewritten
    into HTML links (using the module-level GALAXY_INFRASTRUCTURE_URL).
    """
    metadata = {}
    try:
        if len(node.findall('dataset')) != 1:
            # Exactly one dataset element is expected; bail out early.
            return metadata
    except Exception:
        return {}
    for section in ('dataset', 'history', 'metadata', 'tool'):
        element = node.findall(section)[0]
        for key, value in element.attrib.items():
            metadata['%s_%s' % (section, key)] = value
    # Additional mappings: rewrite selected fields as HTML links for display.
    metadata['dataset_edam_format'] = '<a target="_blank" href="http://edamontology.org/{0}">{1}</a>'.format(metadata['dataset_edam_format'], metadata['dataset_file_ext'])
    metadata['history_user_email'] = '<a href="mailto:{0}">{0}</a>'.format(metadata['history_user_email'])
    metadata['history_display_name'] = '<a target="_blank" href="{galaxy}/history/view/{encoded_hist_id}">{hist_name}</a>'.format(
        galaxy=GALAXY_INFRASTRUCTURE_URL,
        encoded_hist_id=metadata['history_id'],
        hist_name=metadata['history_display_name']
    )
    metadata['tool_tool'] = '<a target="_blank" href="{galaxy}/datasets/{encoded_id}/show_params">{tool_id}</a>'.format(
        galaxy=GALAXY_INFRASTRUCTURE_URL,
        encoded_id=metadata['dataset_id'],
        tool_id=metadata['tool_tool_id'],
        # tool_version=metadata['tool_tool_version'],
    )
    return metadata
class JbrowseConnector(object):
def __init__(self, jbrowse, outdir, genomes, standalone=None, gencode=1):
    """Set up a JBrowse output directory and load the reference genome(s).

    jbrowse    -- path of the JBrowse installation whose bin/ tools are run
    outdir     -- directory the browser instance is assembled in
    genomes    -- list of genome nodes ({'path': ..., 'meta': ...})
    standalone -- "complete"/"minimal" clone JBrowse itself into outdir
                  (via clone_jbrowse, defined past this excerpt); any other
                  value only creates the data directories
    gencode    -- NCBI genetic code id used to patch the codon table
    """
    self.cs = ColorScaling()
    self.jbrowse = jbrowse
    self.outdir = outdir
    self.genome_paths = genomes
    self.standalone = standalone
    self.gencode = gencode
    # Track labels queued for name indexing by generate_names().
    self.tracksToIndex = []
    if standalone == "complete":
        self.clone_jbrowse(self.jbrowse, self.outdir)
    elif standalone == "minimal":
        self.clone_jbrowse(self.jbrowse, self.outdir, minimal=True)
    else:
        try:
            os.makedirs(self.outdir)
        except OSError:
            # Ignore if the folder exists
            pass
        try:
            os.makedirs(os.path.join(self.outdir, 'data', 'raw'))
        except OSError:
            # Ignore if the folder exists
            pass
    # Genomes must be loaded first: update_gencode patches the
    # trackList.json that prepare-refseqs.pl creates.
    self.process_genomes()
    self.update_gencode()
def update_gencode(self):
    """Patch the reference-sequence track in data/trackList.json with the
    start/stop codons and codon table of the configured genetic code."""
    table = CodonTable.unambiguous_dna_by_id[int(self.gencode)]
    track_list_path = os.path.join(self.outdir, 'data', 'trackList.json')
    with open(track_list_path, 'r') as handle:
        config = json.load(handle)
    # Track 0 is the sequence track created by prepare-refseqs.pl.
    config['tracks'][0].update({
        'codonStarts': table.start_codons,
        'codonStops': table.stop_codons,
        'codonTable': table.forward_table,
    })
    with open(track_list_path, 'w') as handle:
        json.dump(config, handle, indent=2)
def subprocess_check_call(self, command, output=None):
    """Run *command* (argument list) inside the output directory.

    output -- optional writable file object to receive stdout.
    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    if output:
        log.debug('cd %s && %s > %s', self.outdir, ' '.join(command), output)
    else:
        log.debug('cd %s && %s', self.outdir, ' '.join(command))
    # stdout=None (the default) inherits the parent's stdout.
    subprocess.check_call(command, cwd=self.outdir, stdout=output or None)
def subprocess_popen(self, command):
    """Run a shell *command* string; raise RuntimeError on failure.

    Used for commands that need shell pipelines (see _sort_gff).
    NOTE(review): command goes through the shell (shell=True), so it must
    never contain untrusted input; also, unlike the other helpers this
    does not pass cwd=self.outdir even though the log message suggests
    it — confirm intended.
    """
    log.debug('cd %s && %s', self.outdir, command)
    proc = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    captured_out, captured_err = proc.communicate()
    if proc.returncode != 0:
        log.error('cd %s && %s', self.outdir, command)
        log.error(captured_out)
        log.error(captured_err)
        raise RuntimeError("Command failed with exit code %s" % (proc.returncode))
def subprocess_check_output(self, command):
    """Run *command* (argument list) in the output dir and return its stdout."""
    printable = ' '.join(command)
    log.debug('cd %s && %s', self.outdir, printable)
    return subprocess.check_output(command, cwd=self.outdir)
def _jbrowse_bin(self, command):
return os.path.realpath(os.path.join(self.jbrowse, 'bin', command))
def symlink_or_copy(self, src, dest):
    """Symlink *src* to *dest* when GALAXY_JBROWSE_SYMLINKS is set, else copy.

    NOTE(review): any non-empty value of the env var — including "0" or
    "false" — enables symlinking, because bool() of a non-empty string
    is True; confirm that is the intended contract.
    """
    if 'GALAXY_JBROWSE_SYMLINKS' in os.environ and bool(os.environ['GALAXY_JBROWSE_SYMLINKS']):
        action = ['ln', '-s']
    else:
        action = ['cp']
    return self.subprocess_check_call(action + [src, dest])
def process_genomes(self):
    """Index each input genome with samtools and load it via prepare-refseqs.pl.

    We only expect one input genome per run; the loop is just easier to
    write than the alternative and catches any possible issues.
    """
    for genome_node in self.genome_paths:
        # Stage the genome in the working directory; prepare-refseqs.pl
        # will copy it to JBrowse's data dir itself.
        local_genome = os.path.realpath('./genome.fasta')
        shutil.copy(genome_node['path'], local_genome)
        # The .fai index is required by the --indexed_fasta loader below.
        cmd = ['samtools', 'faidx', local_genome]
        self.subprocess_check_call(cmd)
        self.subprocess_check_call([
            'perl', self._jbrowse_bin('prepare-refseqs.pl'),
            '--trackConfig', json.dumps({'metadata': genome_node['meta']}),
            '--indexed_fasta', os.path.realpath(local_genome)])
        # The staged copy and its index are no longer needed.
        os.unlink(local_genome)
        os.unlink(local_genome + '.fai')
def generate_names(self):
    """Build the JBrowse name-search index over all tracks queued for indexing.

    Falls back to indexing only the reference sequence ('DNA') when no
    feature track requested indexing.
    """
    cmd = [
        'perl', self._jbrowse_bin('generate-names.pl'),
        '--hashBits', '16'
    ]
    track_list = ','.join(self.tracksToIndex)
    cmd += ['--tracks', track_list if track_list else 'DNA']
    self.subprocess_check_call(cmd)
def _add_json(self, json_data):
    """Merge *json_data* into data/trackList.json via JBrowse's add-json.pl."""
    self.subprocess_check_call([
        'perl', self._jbrowse_bin('add-json.pl'),
        json.dumps(json_data),
        os.path.join('data', 'trackList.json'),
    ])
def _add_track_json(self, json_data):
    """Append a track definition to data/trackList.json via add-track-json.pl.

    No-op when *json_data* is empty.
    """
    if len(json_data) == 0:
        return
    # FIX: open the temp file in text mode. NamedTemporaryFile defaults to
    # 'w+b', and json.dump writes str, which fails on a binary handle under
    # Python 3.
    tmp = tempfile.NamedTemporaryFile('w', delete=False)
    json.dump(json_data, tmp)
    tmp.close()
    cmd = ['perl', self._jbrowse_bin('add-track-json.pl'), tmp.name,
           os.path.join('data', 'trackList.json')]
    self.subprocess_check_call(cmd)
    os.unlink(tmp.name)
def _blastxml_to_gff3(self, xml, min_gap=10):
    """Run the bundled blastxml_to_gapped_gff3.py converter on *xml*.

    min_gap -- gaps shorter than this are merged by the converter.
    Returns the path of a temp file holding the resulting GFF3; the
    caller is responsible for deleting it.
    """
    gff3_unrebased = tempfile.NamedTemporaryFile(delete=False)
    cmd = ['python', os.path.join(INSTALLED_TO, 'blastxml_to_gapped_gff3.py'),
           '--trim', '--trim_end', '--include_seq', '--min_gap', str(min_gap), xml]
    log.debug('cd %s && %s > %s', self.outdir, ' '.join(cmd), gff3_unrebased.name)
    subprocess.check_call(cmd, cwd=self.outdir, stdout=gff3_unrebased)
    gff3_unrebased.close()
    return gff3_unrebased.name
def add_blastxml(self, data, trackData, blastOpts, **kwargs):
    """Convert BLAST XML into a gapped GFF3 track and register it.

    When blastOpts['parent'] names a parent GFF3, hit coordinates are
    first rebased onto the parent features (optionally protein->dna).
    """
    gff3 = self._blastxml_to_gff3(data, min_gap=blastOpts['min_gap'])
    if 'parent' in blastOpts and blastOpts['parent'] != 'None':
        gff3_rebased = tempfile.NamedTemporaryFile(delete=False)
        cmd = ['python', os.path.join(INSTALLED_TO, 'gff3_rebase.py')]
        if blastOpts.get('protein', 'false') == 'true':
            # Map protein-space alignments back onto DNA coordinates.
            cmd.append('--protein2dna')
        cmd.extend([os.path.realpath(blastOpts['parent']), gff3])
        log.debug('cd %s && %s > %s', self.outdir, ' '.join(cmd), gff3_rebased.name)
        subprocess.check_call(cmd, cwd=self.outdir, stdout=gff3_rebased)
        gff3_rebased.close()
        # Replace original gff3 file
        shutil.copy(gff3_rebased.name, gff3)
        os.unlink(gff3_rebased.name)
    dest = os.path.join(self.outdir, 'data', 'raw', trackData['label'] + '.gff')
    # Sort, bgzip and tabix-index for the GFF3Tabix store.
    self._sort_gff(gff3, dest)
    url = os.path.join('raw', trackData['label'] + '.gff.gz')
    trackData.update({
        "urlTemplate": url,
        "storeClass": "JBrowse/Store/SeqFeature/GFF3Tabix",
    })
    trackData['glyph'] = 'JBrowse/View/FeatureGlyph/Segments'
    trackData['trackType'] = 'BlastView/View/Track/CanvasFeatures'
    trackData['type'] = 'BlastView/View/Track/CanvasFeatures'
    self._add_track_json(trackData)
    os.unlink(gff3)
    if blastOpts.get('index', 'false') == 'true':
        self.tracksToIndex.append("%s" % trackData['label'])
def add_bigwig(self, data, trackData, wiggleOpts, **kwargs):
    """Stage a BigWig file and register it as a wiggle coverage track."""
    dest = os.path.join('data', 'raw', trackData['label'] + '.bw')
    self.symlink_or_copy(os.path.realpath(data), dest)
    trackData.update({
        "urlTemplate": os.path.join('raw', trackData['label'] + '.bw'),
        "storeClass": "JBrowse/Store/SeqFeature/BigWig",
        # Caller-chosen renderer (Density/XYPlot/...).
        "type": wiggleOpts['type'],
    })
    trackData['variance_band'] = wiggleOpts['variance_band'] == 'true'
    # Explicit min/max wins over autoscaling.
    if 'min' in wiggleOpts and 'max' in wiggleOpts:
        trackData['min_score'] = wiggleOpts['min']
        trackData['max_score'] = wiggleOpts['max']
    else:
        trackData['autoscale'] = wiggleOpts.get('autoscale', 'local')
    trackData['scale'] = wiggleOpts['scale']
    self._add_track_json(trackData)
def add_bigwig_multiple(self, data, trackData, wiggleOpts, **kwargs):
    """Stage several BigWig files and register them as one MultiBigWig track.

    data -- iterable of (label, path) pairs.
    """
    urls = []
    for idx, (wig_label, wig_path) in enumerate(data):
        local_name = trackData['label'] + '_' + str(idx) + '.bw'
        self.symlink_or_copy(wig_path, os.path.join('data', 'raw', local_name))
        urls.append({"url": os.path.join('raw', local_name),
                     "name": str(idx + 1) + ' - ' + wig_label})
    trackData.update({
        "urlTemplates": urls,
        "showTooltips": "true",
        "storeClass": "MultiBigWig/Store/SeqFeature/MultiBigWig",
        "type": "MultiBigWig/View/Track/MultiWiggle/MultiDensity",
    })
    if 'XYPlot' in wiggleOpts['type']:
        trackData['type'] = "MultiBigWig/View/Track/MultiWiggle/MultiXYPlot"
    trackData['variance_band'] = wiggleOpts['variance_band'] == 'true'
    # Explicit min/max wins over autoscaling.
    if 'min' in wiggleOpts and 'max' in wiggleOpts:
        trackData['min_score'] = wiggleOpts['min']
        trackData['max_score'] = wiggleOpts['max']
    else:
        trackData['autoscale'] = wiggleOpts.get('autoscale', 'local')
    trackData['scale'] = wiggleOpts['scale']
    self._add_track_json(trackData)
def add_maf(self, data, trackData, mafOpts, **kwargs):
    """Convert a MAF alignment to a tabix-indexed bed-like track for MAFViewer.

    Fixes two defects: NamedTemporaryFile exposes `.name`, not `.path`
    (the original raised AttributeError), and the sample list was built
    by iterating the raw grep output character-by-character instead of
    line-by-line.
    """
    script = os.path.realpath(os.path.join(self.jbrowse, 'plugins', 'MAFViewer', 'bin', 'maf2bed.pl'))
    dest = os.path.join('data', 'raw', trackData['label'] + '.txt')
    tmp1 = tempfile.NamedTemporaryFile(delete=False)
    tmp1.close()
    # Process MAF to the bed-like intermediate format.
    with open(tmp1.name, 'w') as handle:
        self.subprocess_check_call([script, data], output=handle)
    # Sort / index it.
    self._sort_bed(tmp1.name, dest)
    # Cleanup
    try:
        os.remove(tmp1.name)
    except OSError:
        pass
    # Construct the unique sample-name list from the MAF's "s <name>" lines.
    # We could get this from galaxy metadata, not sure how easily.
    ps = subprocess.Popen(['grep', '^s [^ ]*', '-o', data], stdout=subprocess.PIPE)
    output = subprocess.check_output(('sort', '-u'), stdin=ps.stdout)
    ps.wait()
    samples = [line[2:] for line in output.decode('utf-8').splitlines()]
    trackData.update({
        "storeClass": "MAFViewer/Store/SeqFeature/MAFTabix",
        "type": "MAFViewer/View/Track/MAF",
        # NOTE(review): unlike the other tracks this urlTemplate has no
        # 'raw/' prefix — confirm that MAFViewer expects it this way.
        "urlTemplate": trackData['label'] + '.txt.gz',
        "samples": samples,
    })
    self._add_track_json(trackData)
def add_bam(self, data, trackData, bamOpts, bam_index=None, **kwargs):
    """Register a BAM alignment track, plus an optional SNP/coverage twin.

    bam_index -- path of the matching .bai, staged next to the BAM.
    """
    dest = os.path.join('data', 'raw', trackData['label'] + '.bam')
    self.symlink_or_copy(os.path.realpath(data), dest)
    self.symlink_or_copy(os.path.realpath(bam_index), dest + '.bai')
    url = os.path.join('raw', trackData['label'] + '.bam')
    trackData.update({
        "urlTemplate": url,
        "type": "JBrowse/View/Track/Alignments2",
        "storeClass": "JBrowse/Store/SeqFeature/BAM",
        "chunkSizeLimit": bamOpts.get('chunkSizeLimit', '5000000')
    })
    # Apollo will only switch to the (prettier) 'bam-read' className if it's
    # not set explicitly in the track config, so remove the default 'feature'
    # value for these bam tracks.
    if 'className' in trackData['style'] and trackData['style']['className'] == 'feature':
        del trackData['style']['className']
    self._add_track_json(trackData)
    if bamOpts.get('auto_snp', 'false') == 'true':
        # Companion SNP/coverage track over the same staged BAM.
        # (copy.copy is shallow, so 'style' is shared with the main track.)
        trackData2 = copy.copy(trackData)
        trackData2.update({
            "type": "JBrowse/View/Track/SNPCoverage",
            "key": trackData['key'] + " - SNPs/Coverage",
            "label": trackData['label'] + "_autosnp",
            "chunkSizeLimit": bamOpts.get('chunkSizeLimit', '5000000')
        })
        self._add_track_json(trackData2)
def add_vcf(self, data, trackData, vcfOpts=None, **kwargs):
    """Stage, bgzip, tabix-index and register a VCF variant track.

    vcfOpts is accepted for signature parity with the other add_*
    methods and is currently unused (changed from a mutable `{}`
    default to None; backward compatible).
    """
    dest = os.path.join('data', 'raw', trackData['label'] + '.vcf')
    # Stage next to the other raw data, then compress and index in place.
    self.subprocess_check_call(['ln', '-s', data, dest])
    self.subprocess_check_call(['bgzip', dest])
    self.subprocess_check_call(['tabix', '-p', 'vcf', dest + '.gz'])
    trackData.update({
        "urlTemplate": os.path.join('raw', trackData['label'] + '.vcf.gz'),
        "type": "JBrowse/View/Track/HTMLVariants",
        "storeClass": "JBrowse/Store/SeqFeature/VCFTabix",
    })
    self._add_track_json(trackData)
def _sort_gff(self, data, dest):
    """Sort a GFF3 with gff3sort.pl, then bgzip and tabix-index it at *dest*.

    Skipped entirely when *dest* already exists (assumed already indexed).
    """
    # Only index if not already done
    if not os.path.exists(dest):
        # gff3sort.pl --precise keeps parents before children; the grep
        # drops blank lines that would upset tabix.
        cmd = "gff3sort.pl --precise '%s' | grep -v \"^$\" > '%s'" % (data, dest)
        self.subprocess_popen(cmd)
        self.subprocess_check_call(['bgzip', '-f', dest])
        self.subprocess_check_call(['tabix', '-f', '-p', 'gff', dest + '.gz'])
def _sort_bed(self, data, dest):
    """Coordinate-sort a BED file, then bgzip and tabix-index it at *dest*.

    Skipped entirely when *dest* already exists (assumed already indexed).
    """
    # Only index if not already done
    if not os.path.exists(dest):
        # Sort by chromosome then numeric start, as tabix requires.
        cmd = ['sort', '-k1,1', '-k2,2n', data]
        with open(dest, 'w') as handle:
            self.subprocess_check_call(cmd, output=handle)
        self.subprocess_check_call(['bgzip', '-f', dest])
        self.subprocess_check_call(['tabix', '-f', '-p', 'bed', dest + '.gz'])
def add_gff(self, data, format, trackData, gffOpts, **kwargs):
    """Sort/index a GFF3 file and register it as a feature track."""
    dest = os.path.join(self.outdir, 'data', 'raw', trackData['label'] + '.gff')
    self._sort_gff(data, dest)
    trackData.update({
        "urlTemplate": os.path.join('raw', trackData['label'] + '.gff.gz'),
        "storeClass": "JBrowse/Store/SeqFeature/GFF3Tabix",
    })
    if 'match' in gffOpts:
        trackData['glyph'] = 'JBrowse/View/FeatureGlyph/Segments'
    trackType = gffOpts.get('trackType', 'JBrowse/View/Track/CanvasFeatures')
    trackData['type'] = trackType
    trackData['trackType'] = trackType  # Probably only used by old jbrowse versions
    canvas_types = ('JBrowse/View/Track/CanvasFeatures',
                    'NeatCanvasFeatures/View/Track/NeatFeatures')
    html_types = ('JBrowse/View/Track/HTMLFeatures',
                  'NeatHTMLFeatures/View/Track/NeatFeatures')
    if trackType in canvas_types:
        # Canvas renderers understand these optional feature-shaping options.
        for opt in ('transcriptType', 'subParts', 'impliedUTRs'):
            if gffOpts.get(opt):
                trackData[opt] = gffOpts[opt]
    elif trackType in html_types:
        if gffOpts.get('topLevelFeatures'):
            trackData['topLevelFeatures'] = gffOpts['topLevelFeatures']
    self._add_track_json(trackData)
    if gffOpts.get('index', 'false') == 'true':
        self.tracksToIndex.append("%s" % trackData['label'])
def add_bed(self, data, format, trackData, gffOpts, **kwargs):
    """Sort/index a BED file and register it as a feature track."""
    dest = os.path.join(self.outdir, 'data', 'raw', trackData['label'] + '.bed')
    self._sort_bed(data, dest)
    trackData.update({
        "urlTemplate": os.path.join('raw', trackData['label'] + '.bed.gz'),
        "storeClass": "JBrowse/Store/SeqFeature/BEDTabix",
    })
    if 'match' in gffOpts:
        trackData['glyph'] = 'JBrowse/View/FeatureGlyph/Segments'
    trackType = gffOpts.get('trackType', 'JBrowse/View/Track/CanvasFeatures')
    trackData['type'] = trackType
    canvas_types = ('JBrowse/View/Track/CanvasFeatures',
                    'NeatCanvasFeatures/View/Track/NeatFeatures')
    html_types = ('JBrowse/View/Track/HTMLFeatures',
                  'NeatHTMLFeatures/View/Track/NeatFeatures')
    if trackType in canvas_types:
        # Canvas renderers understand these optional feature-shaping options.
        for opt in ('transcriptType', 'subParts', 'impliedUTRs'):
            if gffOpts.get(opt):
                trackData[opt] = gffOpts[opt]
    elif trackType in html_types:
        if gffOpts.get('topLevelFeatures'):
            trackData['topLevelFeatures'] = gffOpts['topLevelFeatures']
    self._add_track_json(trackData)
    if gffOpts.get('index', 'false') == 'true':
        self.tracksToIndex.append("%s" % trackData['label'])
def add_genbank(self, data, format, trackData, gffOpts, **kwargs):
    """Load a GenBank file as a feature track via flatfile-to-json.pl.

    Unlike the GFF/BED paths, all styling must travel on the command line
    (--className / --clientConfig / --config), because the track is
    created by the Perl tool rather than by add-track-json.pl.
    """
    cmd = [
        'perl', self._jbrowse_bin('flatfile-to-json.pl'),
        '--genbank', data,
        '--trackLabel', trackData['label'],
        '--key', trackData['key']
    ]
    # className in --clientConfig is ignored, it needs to be set with --className
    if 'className' in trackData['style']:
        cmd += ['--className', trackData['style']['className']]
    # 'style' travels via --clientConfig; everything else via --config.
    config = copy.copy(trackData)
    clientConfig = trackData['style']
    del config['style']
    if 'match' in gffOpts:
        config['glyph'] = 'JBrowse/View/FeatureGlyph/Segments'
        if bool(gffOpts['match']):
            # Can be empty for CanvasFeatures = will take all by default
            cmd += ['--type', gffOpts['match']]
    cmd += ['--clientConfig', json.dumps(clientConfig)]
    trackType = 'JBrowse/View/Track/CanvasFeatures'
    if 'trackType' in gffOpts:
        trackType = gffOpts['trackType']
    if trackType == 'JBrowse/View/Track/CanvasFeatures':
        if 'transcriptType' in gffOpts and gffOpts['transcriptType']:
            config['transcriptType'] = gffOpts['transcriptType']
        if 'subParts' in gffOpts and gffOpts['subParts']:
            config['subParts'] = gffOpts['subParts']
        if 'impliedUTRs' in gffOpts and gffOpts['impliedUTRs']:
            config['impliedUTRs'] = gffOpts['impliedUTRs']
    elif trackType == 'JBrowse/View/Track/HTMLFeatures':
        if 'transcriptType' in gffOpts and gffOpts['transcriptType']:
            cmd += ['--type', gffOpts['transcriptType']]
    # NOTE(review): this reads gffOpts['trackType'] directly and would raise
    # KeyError when the caller omits it (unlike the defaulted lookup above)
    # — confirm callers always supply it.
    cmd += [
        '--trackType', gffOpts['trackType']
    ]
    cmd.extend(['--config', json.dumps(config)])
    self.subprocess_check_call(cmd)
    if gffOpts.get('index', 'false') == 'true':
        self.tracksToIndex.append("%s" % trackData['label'])
def add_rest(self, url, trackData):
    """Register a track backed by a JBrowse REST feature store at *url*."""
    rest_track = {
        "label": trackData['label'],
        "key": trackData['key'],
        "category": trackData['category'],
        "type": "JBrowse/View/Track/HTMLFeatures",
        "storeClass": "JBrowse/Store/SeqFeature/REST",
        "baseUrl": url,
    }
    self._add_track_json(rest_track)
def add_sparql(self, url, query, trackData):
    """Register a track backed by a SPARQL endpoint at *url* running *query*."""
    sparql_track = {
        "label": trackData['label'],
        "key": trackData['key'],
        "category": trackData['category'],
        "type": "JBrowse/View/Track/CanvasFeatures",
        "storeClass": "JBrowse/Store/SeqFeature/SPARQL",
        "urlTemplate": url,
        "queryTemplate": query,
    }
    self._add_track_json(sparql_track)
def traverse_to_option_parent(self, splitKey, outputTrackConfig):
trackConfigSubDict = outputTrackConfig
for part in splitKey[:-1]:
if trackConfigSubDict.get(part) is None:
trackConfigSubDict[part] = dict()
trackConfigSubDict = trackConfigSubDict[part]
assert isinstance(trackConfigSubDict, dict), 'Config element {} is not a dict'.format(trackConfigSubDict)
return trackConfigSubDict
def get_formatted_option(self, valType2ValDict, mapped_chars):
    """Decode a single ``{type: raw_value}`` option into a Python value.

    ``text`` values have Galaxy's escape tokens mapped back to their real
    characters; ``integer``/``float`` are converted; anything else is
    treated as a boolean encoded as 'true'/'false'.
    """
    assert isinstance(valType2ValDict, dict) and len(valType2ValDict.items()) == 1
    (valType, value), = valType2ValDict.items()
    if valType == "text":
        for char, mapped_char in mapped_chars.items():
            value = value.replace(mapped_char, char)
    elif valType == "integer":
        value = int(value)
    elif valType == "float":
        value = float(value)
    else:  # boolean
        value = {'true': True, 'false': False}[value]
    return value
def set_custom_track_options(self, customTrackConfig, outputTrackConfig, mapped_chars):
    """Apply user-supplied dotted-key options onto the output track config."""
    for dotted_key, type_to_value in customTrackConfig.items():
        key_parts = dotted_key.split('.')
        parent = self.traverse_to_option_parent(key_parts, outputTrackConfig)
        parent[key_parts[-1]] = self.get_formatted_option(type_to_value, mapped_chars)
def process_annotations(self, track):
    """Process one track definition dict and emit its JBrowse track(s).

    Builds the common track configuration (style, colours, menus, custom
    options), then dispatches on the dataset extension to the matching
    ``add_*`` handler. Yields the generated (hashed, non-human) label of
    every track added so the caller can record visibility settings.
    """
    # '__pd__date__pd__' is the sanitised form of '#date#' (see mapped_chars).
    category = track['category'].replace('__pd__date__pd__', TODAY)
    outputTrackConfig = {
        'style': {
            'label': track['style'].get('label', 'description'),
            'className': track['style'].get('className', 'feature'),
            'description': track['style'].get('description', ''),
        },
        # These arrive as strings from the XML, hence the == 'True' tests.
        'overridePlugins': track['style'].get('overridePlugins', False) == 'True',
        'overrideDraggable': track['style'].get('overrideDraggable', False) == 'True',
        'maxHeight': track['style'].get('maxHeight', '600'),
        'category': category,
    }

    # Galaxy sanitises element identifiers; this maps each escape token
    # back to the character it stands for.
    mapped_chars = {
        '>': '__gt__',
        '<': '__lt__',
        "'": '__sq__',
        '"': '__dq__',
        '[': '__ob__',
        ']': '__cb__',
        '{': '__oc__',
        '}': '__cc__',
        '@': '__at__',
        '#': '__pd__',
        "": '__cn__'  # NOTE(review): key looks mangled -- '__cn__' usually maps ',' (comma); confirm upstream
    }

    for i, (dataset_path, dataset_ext, track_human_label, extra_metadata) in enumerate(track['trackfiles']):
        # Unsanitize labels (element_identifiers are always sanitized by Galaxy)
        for key, value in mapped_chars.items():
            track_human_label = track_human_label.replace(value, key)

        log.info('Processing %s / %s', category, track_human_label)
        outputTrackConfig['key'] = track_human_label
        # We add extra data to hash for the case of REST + SPARQL.
        if 'conf' in track and 'options' in track['conf'] and 'url' in track['conf']['options']:
            rest_url = track['conf']['options']['url']
        else:
            rest_url = ''

        # I chose to use track['category'] instead of 'category' here. This
        # is intentional. This way re-running the tool on a different date
        # will not generate different hashes and make comparison of outputs
        # much simpler.
        hashData = [str(dataset_path), track_human_label, track['category'], rest_url]
        hashData = '|'.join(hashData).encode('utf-8')
        outputTrackConfig['label'] = hashlib.md5(hashData).hexdigest() + '_%s' % i
        outputTrackConfig['metadata'] = extra_metadata

        # Colour parsing is complex due to different track types having
        # different colour options.
        colourOptions = self.cs.parse_colours(track['conf']['options'], track['format'], gff3=dataset_path)
        # This used to be done with a dict.update() call, however that wiped out any previous style settings...
        for key in colourOptions:
            if key == 'style':
                for subkey in colourOptions['style']:
                    outputTrackConfig['style'][subkey] = colourOptions['style'][subkey]
            else:
                outputTrackConfig[key] = colourOptions[key]

        if 'menus' in track['conf']['options']:
            menus = self.cs.parse_menus(track['conf']['options'])
            outputTrackConfig.update(menus)

        # Optional dotted-key overrides supplied by the user.
        customTrackConfig = track['conf']['options'].get('custom_config', {})
        if customTrackConfig:
            self.set_custom_track_options(customTrackConfig, outputTrackConfig, mapped_chars)

        # import pprint; pprint.pprint(track)
        # import sys; sys.exit()
        # Dispatch on the dataset type to the matching add_* handler.
        if dataset_ext in ('gff', 'gff3'):
            self.add_gff(dataset_path, dataset_ext, outputTrackConfig,
                         track['conf']['options']['gff'])
        elif dataset_ext in ('bed', ):
            self.add_bed(dataset_path, dataset_ext, outputTrackConfig,
                         track['conf']['options']['gff'])
        elif dataset_ext in ('genbank', ):
            self.add_genbank(dataset_path, dataset_ext, outputTrackConfig,
                             track['conf']['options']['gff'])
        elif dataset_ext == 'bigwig':
            self.add_bigwig(dataset_path, outputTrackConfig,
                            track['conf']['options']['wiggle'])
        elif dataset_ext == 'bigwig_multiple':
            self.add_bigwig_multiple(dataset_path, outputTrackConfig,
                                     track['conf']['options']['wiggle'])
        elif dataset_ext == 'maf':
            self.add_maf(dataset_path, outputTrackConfig,
                         track['conf']['options']['maf'])
        elif dataset_ext == 'bam':
            real_indexes = track['conf']['options']['pileup']['bam_indices']['bam_index']
            if not isinstance(real_indexes, list):
                # <bam_indices>
                #  <bam_index>/path/to/a.bam.bai</bam_index>
                # </bam_indices>
                #
                # The above will result in the 'bam_index' key containing a
                # string. If there are two or more indices, the container
                # becomes a list. Fun!
                real_indexes = [real_indexes]

            self.add_bam(dataset_path, outputTrackConfig,
                         track['conf']['options']['pileup'],
                         bam_index=real_indexes[i])
        elif dataset_ext == 'blastxml':
            self.add_blastxml(dataset_path, outputTrackConfig, track['conf']['options']['blast'])
        elif dataset_ext == 'vcf':
            self.add_vcf(dataset_path, outputTrackConfig)
        elif dataset_ext == 'rest':
            self.add_rest(track['conf']['options']['rest']['url'], outputTrackConfig)
        elif dataset_ext == 'sparql':
            sparql_query = track['conf']['options']['sparql']['query']
            # SPARQL queries are user text and may carry sanitised characters.
            for key, value in mapped_chars.items():
                sparql_query = sparql_query.replace(value, key)
            self.add_sparql(track['conf']['options']['sparql']['url'], sparql_query, outputTrackConfig)
        else:
            log.warn('Do not know how to handle %s', dataset_ext)

        # Return non-human label for use in other fields
        yield outputTrackConfig['label']
def add_final_data(self, data):
    """Write the global viewer configuration (visibility lists, general
    options, plugin list) and any plugin-specific tracks/config files.

    ``data`` is the ``extra_data`` dict assembled by the main driver.
    """
    viz_data = {}
    if len(data['visibility']['default_on']) > 0:
        viz_data['defaultTracks'] = ','.join(data['visibility']['default_on'])

    if len(data['visibility']['always']) > 0:
        viz_data['alwaysOnTracks'] = ','.join(data['visibility']['always'])

    if len(data['visibility']['force']) > 0:
        viz_data['forceTracks'] = ','.join(data['visibility']['force'])

    generalData = {}
    if data['general']['aboutDescription'] is not None:
        generalData['aboutThisBrowser'] = {'description': data['general']['aboutDescription'].strip()}

    generalData['view'] = {
        'trackPadding': data['general']['trackPadding']
    }
    # Boolean options arrive as 'true'/'false' strings from the XML.
    generalData['shareLink'] = (data['general']['shareLink'] == 'true')
    generalData['show_tracklist'] = (data['general']['show_tracklist'] == 'true')
    generalData['show_nav'] = (data['general']['show_nav'] == 'true')
    generalData['show_overview'] = (data['general']['show_overview'] == 'true')
    generalData['show_menu'] = (data['general']['show_menu'] == 'true')
    generalData['hideGenomeOptions'] = (data['general']['hideGenomeOptions'] == 'true')
    generalData['plugins'] = data['plugins']

    viz_data.update(generalData)
    self._add_json(viz_data)

    if 'GCContent' in data['plugins_python']:
        # Two synthetic tracks computed from the genome FASTA by the
        # GCContent plugin: raw GC content and GC skew.
        self._add_track_json({
            "storeClass": "JBrowse/Store/Sequence/IndexedFasta",
            "type": "GCContent/View/Track/GCContentXY",
            "label": "GC Content",
            "key": "GCContentXY",
            "urlTemplate": "seq/genome.fasta",
            "bicolor_pivot": 0.5,
            "category": "GC Content",
            "metadata": {
                "tool_tool": '<a target="_blank" href="https://github.com/elsiklab/gccontent/commit/030180e75a19fad79478d43a67c566ec6">elsiklab/gccontent</a>',
                "tool_tool_version": "5c8b0582ecebf9edf684c76af8075fb3d30ec3fa",
                "dataset_edam_format": "",
                "dataset_size": "",
                "history_display_name": "",
                "history_user_email": "",
                "metadata_dbkey": "",
            }
            # TODO: Expose params for everyone.
        })
        self._add_track_json({
            "storeClass": "JBrowse/Store/Sequence/IndexedFasta",
            "type": "GCContent/View/Track/GCContentXY",
            "label": "GC skew",
            "key": "GCSkew",
            "urlTemplate": "seq/genome.fasta",
            "gcMode": "skew",
            "min_score": -1,
            "bicolor_pivot": 0,
            "category": "GC Content",
            "metadata": {
                "tool_tool": '<a target="_blank" href="https://github.com/elsiklab/gccontent/commit/030180e75a19fad79478d43a67c566ec6">elsiklab/gccontent</a>',
                "tool_tool_version": "5c8b0582ecebf9edf684c76af8075fb3d30ec3fa",
                "dataset_edam_format": "",
                "dataset_size": "",
                "history_display_name": "",
                "history_user_email": "",
                "metadata_dbkey": "",
            }
            # TODO: Expose params for everyone.
        })

    if 'ComboTrackSelector' in data['plugins_python']:
        # Augment the generated trackList.json with faceted-selector
        # metadata and write it back out as trackList2.json.
        with open(os.path.join(self.outdir, 'data', 'trackList.json'), 'r') as handle:
            trackListJson = json.load(handle)
            trackListJson.update({
                "trackSelector": {
                    "renameFacets": {
                        "tool_tool": "Tool ID",
                        "tool_tool_id": "Tool ID",
                        "tool_tool_version": "Tool Version",
                        "dataset_edam_format": "EDAM",
                        "dataset_size": "Size",
                        "history_display_name": "History Name",
                        "history_user_email": "Owner",
                        "metadata_dbkey": "Dbkey",
                    },
                    "displayColumns": [
                        "key",
                        "tool_tool",
                        "tool_tool_version",
                        "dataset_edam_format",
                        "dataset_size",
                        "history_display_name",
                        "history_user_email",
                        "metadata_dbkey",
                    ],
                    "type": "Faceted",
                    "title": ["Galaxy Metadata"],
                    "icon": "https://galaxyproject.org/images/logos/galaxy-icon-square.png",
                    "escapeHTMLInData": False
                },
                "trackMetadata": {
                    "indexFacets": [
                        "category",
                        "key",
                        "tool_tool_id",
                        "tool_tool_version",
                        "dataset_edam_format",
                        "history_user_email",
                        "history_display_name"
                    ]
                }
            })
            with open(os.path.join(self.outdir, 'data', 'trackList2.json'), 'w') as handle:
                json.dump(trackListJson, handle)
def clone_jbrowse(self, jbrowse_dir, destination, minimal=False):
    """Clone a JBrowse directory into a destination directory.

    With ``minimal=True`` only the files JBrowse needs to run are copied;
    otherwise the whole release directory is mirrored. Dangling symlinks
    shipped with the release are removed afterwards.
    """
    if minimal:
        # Should be the absolute minimum required for JBrowse to function.
        required = [
            'dist', 'img', 'index.html', 'jbrowse.conf',
            'jbrowse_conf.json', 'webpack.config.js'
        ]
        for entry in required:
            self.subprocess_check_call(
                ['cp', '-r', os.path.join(jbrowse_dir, entry), destination])
    else:
        # JBrowse seems to have included some bad symlinks, cp ignores bad
        # symlinks unlike copytree
        self.subprocess_check_call(
            ['cp', '-r', os.path.join(jbrowse_dir, '.'), destination])

    self.subprocess_check_call(
        ['mkdir', '-p', os.path.join(destination, 'data', 'raw')])

    # http://unix.stackexchange.com/a/38691/22785
    # JBrowse releases come with some broken symlinks
    dangling = self.subprocess_check_output(
        ['find', destination, '-type', 'l', '-xtype', 'l'])
    for link in dangling:
        try:
            os.unlink(link)
        except OSError:
            pass
if __name__ == '__main__':
    # Command-line driver: parse the Galaxy-generated XML track
    # configuration, build the JbrowseConnector, and feed every <track>
    # through process_annotations().
    parser = argparse.ArgumentParser(description="", epilog="")
    parser.add_argument('xml', type=argparse.FileType('r'), help='Track Configuration')
    parser.add_argument('--jbrowse', help='Folder containing a jbrowse release')
    parser.add_argument('--outdir', help='Output directory', default='out')
    parser.add_argument('--standalone', choices=['complete', 'minimal', 'data'], help='Standalone mode includes a copy of JBrowse')
    parser.add_argument('--version', '-V', action='version', version="%(prog)s 0.8.0")
    args = parser.parse_args()

    tree = ET.parse(args.xml.name)
    root = tree.getroot()

    # This should be done ASAP
    GALAXY_INFRASTRUCTURE_URL = root.find('metadata/galaxyUrl').text
    # Sometimes this comes as `localhost` without a protocol
    if not GALAXY_INFRASTRUCTURE_URL.startswith('http'):
        # so we'll prepend `http://` and hope for the best. Requests *should*
        # be GET and not POST so it should redirect OK
        GALAXY_INFRASTRUCTURE_URL = 'http://' + GALAXY_INFRASTRUCTURE_URL

    jc = JbrowseConnector(
        jbrowse=args.jbrowse,
        outdir=args.outdir,
        genomes=[
            {
                'path': os.path.realpath(x.attrib['path']),
                'meta': metadata_from_node(x.find('metadata'))
            }
            for x in root.findall('metadata/genomes/genome')
        ],
        standalone=args.standalone,
        gencode=root.find('metadata/gencode').text
    )

    # Per-run settings collected from the XML and handed to
    # jc.add_final_data() at the end.
    extra_data = {
        'visibility': {
            'default_on': [],
            'default_off': [],
            'force': [],
            'always': [],
        },
        'general': {
            'defaultLocation': root.find('metadata/general/defaultLocation').text,
            'trackPadding': int(root.find('metadata/general/trackPadding').text),
            'shareLink': root.find('metadata/general/shareLink').text,
            'aboutDescription': root.find('metadata/general/aboutDescription').text,
            'show_tracklist': root.find('metadata/general/show_tracklist').text,
            'show_nav': root.find('metadata/general/show_nav').text,
            'show_overview': root.find('metadata/general/show_overview').text,
            'show_menu': root.find('metadata/general/show_menu').text,
            'hideGenomeOptions': root.find('metadata/general/hideGenomeOptions').text,
        },
        'plugins': [],
        'plugins_python': [],
    }

    # Optional plugins, enabled via attributes of the <plugins> element.
    plugins = root.find('plugins').attrib
    if plugins['GCContent'] == 'True':
        extra_data['plugins_python'].append('GCContent')
        extra_data['plugins'].append({
            'location': 'https://cdn.jsdelivr.net/gh/elsiklab/gccontent@5c8b0582ecebf9edf684c76af8075fb3d30ec3fa/',
            'name': 'GCContent'
        })

    # Not needed in 1.16.1: it's built in the conda package now, and this plugin doesn't need to be enabled anywhere
    # if plugins['Bookmarks'] == 'True':
    #     extra_data['plugins'].append({
    #         'location': 'https://cdn.jsdelivr.net/gh/TAMU-CPT/bookmarks-jbrowse@5242694120274c86e1ccd5cb0e5e943e78f82393/',
    #         'name': 'Bookmarks'
    #     })

    # Not needed in 1.16.1: it's built in the conda package now, and this plugin doesn't need to be enabled anywhere
    if plugins['ComboTrackSelector'] == 'True':
        extra_data['plugins_python'].append('ComboTrackSelector')
        # Not needed in 1.16.1: it's built in the conda package now, and this plugin doesn't need to be enabled anywhere
        # extra_data['plugins'].append({
        #     'location': 'https://cdn.jsdelivr.net/gh/Arabidopsis-Information-Portal/ComboTrackSelector@52403928d5ccbe2e3a86b0fa5eb8e61c0f2e2f57/',
        #     'icon': 'https://galaxyproject.org/images/logos/galaxy-icon-square.png',
        #     'name': 'ComboTrackSelector'
        # })

    if plugins['theme'] == 'Minimalist':
        extra_data['plugins'].append({
            'location': 'https://cdn.jsdelivr.net/gh/erasche/jbrowse-minimalist-theme@d698718442da306cf87f033c72ddb745f3077775/',
            'name': 'MinimalistTheme'
        })
    elif plugins['theme'] == 'Dark':
        extra_data['plugins'].append({
            'location': 'https://cdn.jsdelivr.net/gh/erasche/jbrowse-dark-theme@689eceb7e33bbc1b9b15518d45a5a79b2e5d0a26/',
            'name': 'DarkTheme'
        })

    if plugins['BlastView'] == 'True':
        extra_data['plugins_python'].append('BlastView')
        extra_data['plugins'].append({
            'location': 'https://cdn.jsdelivr.net/gh/TAMU-CPT/blastview@97572a21b7f011c2b4d9a0b5af40e292d694cbef/',
            'name': 'BlastView'
        })

    for track in root.findall('tracks/track'):
        track_conf = {}
        track_conf['trackfiles'] = []

        is_multi_bigwig = False
        try:
            if track.find('options/wiggle/multibigwig') and (track.find('options/wiggle/multibigwig').text == 'True'):
                is_multi_bigwig = True
                multi_bigwig_paths = []
        except KeyError:
            pass

        trackfiles = track.findall('files/trackFile')
        if trackfiles:
            for x in track.findall('files/trackFile'):
                if is_multi_bigwig:
                    # All bigwigs of the track are merged into one entry later.
                    multi_bigwig_paths.append((x.attrib['label'], os.path.realpath(x.attrib['path'])))
                else:
                    if trackfiles:
                        metadata = metadata_from_node(x.find('metadata'))

                        track_conf['trackfiles'].append((
                            os.path.realpath(x.attrib['path']),
                            x.attrib['ext'],
                            x.attrib['label'],
                            metadata
                        ))
        else:
            # For tracks without files (rest, sparql)
            track_conf['trackfiles'].append((
                '',  # N/A, no path for rest or sparql
                track.attrib['format'],
                track.find('options/label').text,
                {}
            ))

        if is_multi_bigwig:
            # NOTE(review): 'x' here is the loop variable left over from the
            # trackFile loop above -- presumably intentional, but fragile.
            metadata = metadata_from_node(x.find('metadata'))

            track_conf['trackfiles'].append((
                multi_bigwig_paths,  # Passing an array of paths to represent as one track
                'bigwig_multiple',
                'MultiBigWig',  # Giving an hardcoded name for now
                {}  # No metadata for multiple bigwig
            ))

        track_conf['category'] = track.attrib['cat']
        track_conf['format'] = track.attrib['format']
        try:
            # Only pertains to gff3 + blastxml. TODO?
            track_conf['style'] = {t.tag: t.text for t in track.find('options/style')}
        except TypeError:
            track_conf['style'] = {}
            pass
        track_conf['conf'] = etree_to_dict(track.find('options'))
        keys = jc.process_annotations(track_conf)

        for key in keys:
            extra_data['visibility'][track.attrib.get('visibility', 'default_off')].append(key)

    jc.add_final_data(extra_data)
    jc.generate_names()
|
martenson/tools-iuc
|
tools/jbrowse/jbrowse.py
|
Python
|
mit
| 52,886
|
[
"BLAST",
"Galaxy"
] |
9481ff71a3dd3d78c710622945766b1fbb17b7d397447ec2199269a26014c812
|
##
## @file numfig.py
##
## @brief For numbering figures in Sphinx
##
## @version $Id: numfig.py 10 2012-10-07 14:39:47Z jrobcary $
##
## Started from https://bitbucket.org/arjones6/sphinx-numfig/wiki/Home
##
## Copyright © 2005-2012, Tech-X Corporation, Boulder, CO
## Free for any use whatsoever.
##
from docutils.nodes \
import figure, caption, Text, reference, raw, SkipNode, Element
from sphinx.roles import XRefRole
#
# Element classes
#
class page_ref(reference):
    # Reference node produced by the ':page:' role; rendered as \pageref
    # by the LaTeX builder and skipped by text/html builders.
    pass
class num_ref(reference):
    # Reference node produced by the ':num:' role; resolved to a numbered
    # figure reference by the visitors and insert_numfig_links below.
    pass
#
# Visit/depart functions
#
# Why is SkipNode raised?
#
def skip_page_ref(self, node):
    # Visitor that emits nothing for a page_ref node (text/html builders).
    raise SkipNode
def skip_num_ref(self, node):
    # Visitor that emits nothing for a num_ref node (text builder).
    raise SkipNode
def latex_visit_page_ref(self, node):
    # Emit \pageref{<docname>:<target>} and skip the node's children.
    self.body.append("\\pageref{%s:%s}" % (node['refdoc'], node['reftarget']))
    raise SkipNode
def latex_visit_num_ref(self, node):
    """Render a num_ref node as LaTeX: a plain \\ref for a bare target,
    or a labelled \\hyperref when the target is given as "Label#target"."""
    fields = node['reftarget'].split('#')
    if len(fields) == 1:
        # Bare target: numeric reference within the referencing document.
        self.body.append('\\ref{%s:%s}' % (node['refdoc'], fields[0]))
    else:
        label, target = fields
        ref_link = '%s:%s' % (node['refdoc'], target)
        self.body.append("\\hyperref[%s]{%s \\ref*{%s}}" % (ref_link, label, ref_link))
    raise SkipNode
def latex_depart_num_ref(self, node):
    # Nothing to close: the visit function raises SkipNode.
    pass
def html_visit_num_ref(self, node):
    # HTML rendering for a num_ref node.
    # NOTE(review): target_file is initialised to '' and never reassigned,
    # so the refdoc == target_file test below is effectively always False
    # and the generated href begins with ".html#...". This looks like a
    # latent bug; in practice insert_numfig_links() appears to replace
    # num_ref nodes for HTML builds before rendering, so this visitor seems
    # to be a fallback path only -- confirm before changing.
    fields = node['reftarget'].split('#')
    if len(fields) > 1:
        label, target = fields
        target_file = ''
        if node['refdoc']==target_file:
            # Target file and curent file are the same
            link = "%s.html#%s" %(node['refdoc'], target.lower())
        else:
            link = "%s.html#%s" %(target_file, target.lower())
        html = '<a href="%s">%s</a>' %(link, label)
        self.body.append(html)
    else:
        # Bare target: link to the referenced document itself.
        self.body.append('<a href="%s.html">%s</a>' % (node['refdoc'], fields[0]))
def html_depart_num_ref(self, node):
    # Nothing to close after the anchor emitted by html_visit_num_ref.
    pass
def compute_numfig_fignums(app, doctree):
    # Generate figure numbers for each figure
    # Connected to 'doctree-read'; numbering state is stored on the build
    # environment so the running count continues across documents.
    env = app.builder.env
    i = getattr(env, 'i', 1)                     # next figure number
    figids = getattr(env, 'figids', {})          # figure id -> number
    figid_docname_map = getattr(env, 'figid_docname_map', {})  # id -> docname
    for figure_info in doctree.traverse(figure):
        # LaTeX numbers figures natively, so only rewrite captions for
        # other builders (and only if numbering is enabled).
        if app.builder.name != 'latex' and app.config.numfig_number_figures:
            for cap in figure_info.traverse(caption):
                cap[0] = Text("%s %d: %s" % \
                    (app.config.numfig_figure_caption_prefix, i, cap[0]))
        # A figure may carry several ids; all map to the same number.
        for id in figure_info['ids']:
            figids[id] = i
            figid_docname_map[id] = env.docname
        i += 1
    env.figid_docname_map = figid_docname_map
    env.i = i
    env.figids = figids
def insert_numfig_links(app, doctree, docname):
    # Replace numfig nodes with links
    # Connected to 'doctree-resolved'; uses the numbers computed by
    # compute_numfig_fignums. LaTeX keeps its nodes for native \ref handling.
    figids = app.builder.env.figids
    if app.builder.name != 'latex':
        for ref_info in doctree.traverse(num_ref):
            if '#' in ref_info['reftarget']:
                # "Label#target" form: prefix the number with the label.
                label, target = ref_info['reftarget'].split('#')
                labelfmt = label + " %d"
            else:
                labelfmt = '%d'
                target = ref_info['reftarget']

            if target not in figids:
                # Unknown target: leave the node untouched.
                continue

            if app.builder.name == 'html':
                target_doc = app.builder.env.figid_docname_map[target]
                link = "%s#%s" % (app.builder.get_relative_uri(docname, target_doc),
                                  target)
                html = '<a href="%s">%s</a>' % (link, labelfmt %(figids[target]))
                ref_info.replace_self(raw(html, html, format='html'))
            else:
                # Other builders get plain numbered text.
                ref_info.replace_self(Text(labelfmt % (figids[target])))
def setup(app):
    """Register the numfig config values, nodes, roles and event hooks."""
    # Are these used?
    app.add_config_value('numfig_number_figures', True, True)
    app.add_config_value('numfig_figure_caption_prefix', "Figure", True)

    app.add_node(page_ref,
                 text=(skip_page_ref, None),
                 html=(skip_page_ref, None),
                 latex=(latex_visit_page_ref, None))

    app.add_role('page', XRefRole(nodeclass=page_ref))

    app.add_node(num_ref,
                 text=(skip_num_ref, None),
                 html=(html_visit_num_ref, html_depart_num_ref),
                 latex=(latex_visit_num_ref, latex_depart_num_ref))

    app.add_role('num', XRefRole(nodeclass=num_ref))

    # Number figures as each doctree is read; resolve references afterwards.
    app.connect('doctree-read', compute_numfig_fignums)
    app.connect('doctree-resolved', insert_numfig_links)
|
WASP-System/central
|
wasp-doc/src/sphinx/sphinx-plugins/numfig/numfig.py
|
Python
|
agpl-3.0
| 4,026
|
[
"VisIt"
] |
df4d748acd8e0ccc0399b9c8bb922b76d9fa63755eca89477d910c335fa8a8bb
|
# -*- coding: utf-8 -*-
# Author: Óscar Nájera
# License: 3-clause BSD
"""
Backreferences Generator
========================
Parses example file code in order to keep track of used functions
"""
from __future__ import print_function
import ast
import os
# Try Python 2 first, otherwise load from Python 3
try:
import cPickle as pickle
except ImportError:
import pickle
# Try Python 3 first, otherwise load from Python 2
try:
from html import escape
except ImportError:
from functools import partial
from xml.sax.saxutils import escape
escape = partial(escape, entities={'"': '"'})
class NameFinder(ast.NodeVisitor):
    """Finds the longest form of variable names and their imports in code

    Only retains names from imported modules.
    """

    def __init__(self):
        super(NameFinder, self).__init__()
        # local alias -> fully qualified import path
        self.imported_names = {}
        # every (possibly dotted) name read anywhere in the code
        self.accessed_names = set()

    def visit_Import(self, node, prefix=''):
        # Record each alias under the name it is bound to locally.
        for alias in node.names:
            bound_as = alias.name if alias.asname is None else alias.asname
            self.imported_names[bound_as] = prefix + alias.name

    def visit_ImportFrom(self, node):
        # "from pkg import x" is "import pkg.x" with prefix "pkg.".
        self.visit_Import(node, node.module + '.')

    def visit_Name(self, node):
        self.accessed_names.add(node.id)

    def visit_Attribute(self, node):
        # Collect the attribute chain of a.b.c from the outside in.
        chain = []
        while isinstance(node, ast.Attribute):
            chain.append(node.attr)
            node = node.value
        if isinstance(node, ast.Name):
            # This is a.b, not e.g. a().b
            chain.append(node.id)
            self.accessed_names.add('.'.join(chain[::-1]))
        else:
            # need to get a in a().b
            self.visit(node)

    def get_mapping(self):
        """Yield (name_as_written, fully_qualified_name) pairs for every
        accessed name whose head was imported."""
        for name in self.accessed_names:
            head = name.split('.', 1)[0]
            tail = name[len(head):]
            if head in self.imported_names:
                yield name, self.imported_names[head] + tail
def get_short_module_name(module_name, obj_name):
    """Return the shortest dotted prefix of *module_name* from which
    *obj_name* can still be imported (probed by actually importing)."""
    parts = module_name.split('.')
    short_name = module_name
    for depth in reversed(range(1, len(parts))):
        candidate = '.'.join(parts[:depth])
        try:
            exec('from %s import %s' % (candidate, obj_name))
        except Exception:  # libraries can throw all sorts of exceptions...
            # keep the last working (one-segment-longer) module name
            short_name = '.'.join(parts[:depth + 1])
            break
        short_name = candidate
    return short_name
def identify_names(code):
    """Builds a codeobj summary by identifying and resolving used names

    >>> code = '''
    ... from a.b import c
    ... import d as e
    ... print(c)
    ... e.HelloWorld().f.g
    ... '''
    >>> for name, o in sorted(identify_names(code).items()):
    ...     print(name, o['name'], o['module'], o['module_short'])
    c c a.b a.b
    e.HelloWorld HelloWorld d d
    """
    finder = NameFinder()
    try:
        finder.visit(ast.parse(code))
    except SyntaxError:
        return {}

    example_code_obj = {}
    for name, full_name in finder.get_mapping():
        # name is as written in file (e.g. np.asarray)
        # full_name includes resolved import path (e.g. numpy.asarray)
        module, _, attribute = full_name.rpartition('.')
        if not module:
            # module without attribute. This is not useful for
            # backreferences
            continue
        example_code_obj[name] = {
            'name': attribute,
            'module': module,
            # get shortened module name
            'module_short': get_short_module_name(module, attribute),
        }
    return example_code_obj
def scan_used_functions(example_file, gallery_conf):
    """save variables so we can later add links to the documentation"""
    # Parse the example source and resolve every imported name it uses.
    example_code_obj = identify_names(open(example_file).read())
    if example_code_obj:
        # Persist the summary next to the example for later link insertion.
        codeobj_fname = example_file[:-3] + '_codeobj.pickle'
        with open(codeobj_fname, 'wb') as fid:
            pickle.dump(example_code_obj, fid, pickle.HIGHEST_PROTOCOL)

    # Only names belonging to the documented module get backreferences.
    backrefs = set('{module_short}.{name}'.format(**entry)
                   for entry in example_code_obj.values()
                   if entry['module'].startswith(gallery_conf['doc_module']))
    return backrefs
# RST snippet emitted for one example thumbnail in gallery/backreference
# pages. NOTE(review): the literals below appear to have lost their original
# internal indentation/blank lines in transit -- verify the exact RST layout
# against upstream sphinx-gallery before relying on it.
THUMBNAIL_TEMPLATE = """
.. raw:: html
<div class="sphx-glr-thumbcontainer" tooltip="{snippet}">
.. only:: html
.. figure:: /{thumbnail}
:ref:`sphx_glr_{ref_name}`
.. raw:: html
</div>
"""

# Backreference pages add a plain-text fallback for non-HTML builders.
BACKREF_THUMBNAIL_TEMPLATE = THUMBNAIL_TEMPLATE + """
.. only:: not html
* :ref:`sphx_glr_{ref_name}`
"""
def _thumbnail_div(full_dir, fname, snippet, is_backref=False):
    """Generates RST to place a thumbnail in a gallery"""
    png_name = 'sphx_glr_%s_thumb.png' % fname[:-3]
    thumb = os.path.join(full_dir, 'images', 'thumb', png_name)
    # Inside rst files forward slash defines paths
    thumb = thumb.replace(os.sep, "/")

    ref_name = os.path.join(full_dir, fname).replace(os.path.sep, '_')

    if is_backref:
        template = BACKREF_THUMBNAIL_TEMPLATE
    else:
        template = THUMBNAIL_TEMPLATE
    return template.format(snippet=escape(snippet),
                           thumbnail=thumb, ref_name=ref_name)
def write_backreferences(seen_backrefs, gallery_conf,
                         target_dir, fname, snippet):
    """Writes down back reference files, which include a thumbnail list
    of examples using a certain module"""
    # Feature disabled when no backreferences directory is configured.
    if gallery_conf['backreferences_dir'] is None:
        return

    example_file = os.path.join(target_dir, fname)
    build_target_dir = os.path.relpath(target_dir, gallery_conf['src_dir'])
    backrefs = scan_used_functions(example_file, gallery_conf)
    for backref in backrefs:
        include_path = os.path.join(gallery_conf['src_dir'],
                                    gallery_conf['backreferences_dir'],
                                    '%s.examples' % backref)
        # First example for this name truncates the file and writes the
        # heading; later examples just append their thumbnail.
        seen = backref in seen_backrefs
        with open(include_path, 'a' if seen else 'w') as ex_file:
            if not seen:
                heading = '\n\nExamples using ``%s``' % backref
                ex_file.write(heading + '\n')
                ex_file.write('^' * len(heading) + '\n')
            ex_file.write(_thumbnail_div(build_target_dir, fname, snippet,
                                         is_backref=True))
        seen_backrefs.add(backref)
|
bthirion/nistats
|
doc/sphinxext/sphinx_gallery/backreferences.py
|
Python
|
bsd-3-clause
| 6,560
|
[
"VisIt"
] |
3aea118113af58cd423c49c9b422118b85b3154aca7d72c5a92fb5f0e76f458f
|
"""Gromacs molecular dynamics simulation datasets.
"""
# All public loaders come from the .access submodule. A redundant bare
# ``from .access import load_benzene`` (already covered by the grouped
# import) was removed.
from .access import (load_benzene, load_ABFE,
                     load_expanded_ensemble_case_1,
                     load_expanded_ensemble_case_2,
                     load_expanded_ensemble_case_3,
                     load_water_particle_with_potential_energy,
                     load_water_particle_with_total_energy,
                     load_water_particle_without_energy)
|
alchemistry/alchemtest
|
src/alchemtest/gmx/__init__.py
|
Python
|
bsd-3-clause
| 473
|
[
"Gromacs"
] |
81d509b47df54f53afe8c2626a3f086eaabe228f4c7048d3d033c56697ae6b8e
|
""" NotificationDB class is a front-end to the Notifications database
"""
__RCSID__ = "$Id$"
import time
import types
from DIRAC import gConfig, gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.Mail import Mail
from DIRAC.Core.Base.DB import DB
from DIRAC.Core.Utilities import DEncode
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
class NotificationDB( DB ):
def __init__( self ):
  # Connect to the Framework/NotificationDB backend and ensure the schema
  # exists; a failed initialisation is fatal-logged but not raised.
  DB.__init__( self, 'NotificationDB', 'Framework/NotificationDB' )
  result = self.__initializeDB()
  if not result[ 'OK' ]:
    self.log.fatal( "Cannot initialize DB!", result[ 'Message' ] )
  # Column-name vocabularies reused by the query helpers of this class.
  self.__alarmQueryFields = [ 'alarmid', 'author', 'creationtime', 'modtime', 'subject',
                              'status', 'priority', 'notifications', 'body', 'assignee', 'alarmkey' ]
  self.__alarmLogFields = [ 'timestamp', 'author', 'comment', 'modifications' ]
  self.__notificationQueryFields = ( 'id', 'user', 'seen', 'message', 'timestamp' )
  # Field requirements and allowed values for alarm creation/update.
  self.__newAlarmMandatoryFields = [ 'author', 'subject', 'status', 'notifications', 'body', 'assignee', 'priority' ]
  self.__updateAlarmIdentificationFields = [ 'id', 'alarmKey' ]
  self.__updateAlarmMandatoryFields = [ 'author' ]
  self.__updateAlarmAtLeastOneField = [ 'comment', 'modifications' ]
  self.__updateAlarmModificableFields = [ 'status', 'assignee', 'priority' ]
  self.__validAlarmStatus = [ 'Open', 'OnGoing', 'Closed', 'Testing' ]
  self.__validAlarmNotifications = [ 'Web', 'Mail', 'SMS' ]
  self.__validAlarmPriorities = [ 'Low', 'Medium', 'High', 'Extreme' ]
def __initializeDB( self ):
  """Create any ntf_* tables missing from the database. Returns S_OK/S_ERROR."""
  retVal = self._query( "show tables" )
  if not retVal[ 'OK' ]:
    return retVal
  tablesInDB = [ t[0] for t in retVal[ 'Value' ] ]
  tablesToCreate = {}
  # Main alarm records.
  if 'ntf_Alarms' not in tablesInDB:
    tablesToCreate[ 'ntf_Alarms' ] = { 'Fields' : { 'AlarmId' : 'INTEGER UNSIGNED AUTO_INCREMENT NOT NULL',
                                                    'AlarmKey' : 'VARCHAR(32) NOT NULL',
                                                    'Author' : 'VARCHAR(64) NOT NULL',
                                                    'CreationTime' : 'DATETIME NOT NULL',
                                                    'ModTime' : 'DATETIME NOT NULL',
                                                    'Subject' : 'VARCHAR(255) NOT NULL',
                                                    'Status' : 'VARCHAR(64) NOT NULL',
                                                    'Priority' : 'VARCHAR(32) NOT NULL',
                                                    'Body' : 'BLOB',
                                                    'Assignee' : 'VARCHAR(64) NOT NULL',
                                                    'Notifications' : 'VARCHAR(128) NOT NULL'
                                                  },
                                       'PrimaryKey' : 'AlarmId',
                                       'Indexes' : { 'Status' : [ 'Status' ],
                                                     'Assignee' : [ 'Assignee' ] }
                                     }
  # Membership of named assignee groups.
  if 'ntf_AssigneeGroups' not in tablesInDB:
    tablesToCreate[ 'ntf_AssigneeGroups' ] = { 'Fields' : { 'AssigneeGroup' : 'VARCHAR(64) NOT NULL',
                                                            'User' : 'VARCHAR(64) NOT NULL',
                                                          },
                                               'Indexes' : { 'ag' : [ 'AssigneeGroup' ] }
                                             }
  # Per-alarm history entries (comments / modifications).
  if 'ntf_AlarmLog' not in tablesInDB:
    tablesToCreate[ 'ntf_AlarmLog' ] = { 'Fields' : { 'AlarmId' : 'INTEGER UNSIGNED NOT NULL',
                                                      'Timestamp' : 'DATETIME NOT NULL',
                                                      'Author' : 'VARCHAR(64) NOT NULL',
                                                      'Comment' : 'BLOB',
                                                      'Modifications' : 'VARCHAR(255)',
                                                    },
                                         'Indexes' : { 'AlarmID' : [ 'AlarmId' ] }
                                       }
  # Users subscribed to an alarm and their per-channel flags.
  if 'ntf_AlarmFollowers' not in tablesInDB:
    tablesToCreate[ 'ntf_AlarmFollowers' ] = { 'Fields' : { 'AlarmId' : 'INTEGER UNSIGNED NOT NULL',
                                                            'User' : 'VARCHAR(64) NOT NULL',
                                                            'Mail' : 'TINYINT(1) DEFAULT 0',
                                                            'Notification' : 'TINYINT(1) DEFAULT 1',
                                                            'SMS' : 'TINYINT(1) DEFAULT 0',
                                                          },
                                               'Indexes' : { 'AlarmID' : [ 'AlarmId' ] }
                                             }
  # Per-user notification messages.
  if 'ntf_Notifications' not in tablesInDB:
    tablesToCreate[ 'ntf_Notifications' ] = { 'Fields' : { 'Id' : 'INTEGER UNSIGNED AUTO_INCREMENT NOT NULL',
                                                           'User' : 'VARCHAR(64) NOT NULL',
                                                           'Message' : 'BLOB NOT NULL',
                                                           'Seen' : 'TINYINT(1) NOT NULL DEFAULT 0',
                                                           'Expiration' : 'DATETIME',
                                                           'Timestamp' : 'DATETIME',
                                                           'DeferToMail' : 'TINYINT(1) NOT NULL DEFAULT 1',
                                                         },
                                              'PrimaryKey' : 'Id',
                                            }
  if tablesToCreate:
    return self._createTables( tablesToCreate )
  return S_OK()
def __checkAlarmField( self, name, value ):
  """Validate a single alarm field value.

  For 'assignee' the returned S_OK carries the resolved user list in
  'Value'; for all other (known) fields a plain S_OK() means the value
  is acceptable.
  """
  key = name.lower()
  if key == 'status':
    if value not in self.__validAlarmStatus:
      return S_ERROR( "Status %s is invalid. Valid ones are: %s" % ( value, self.__validAlarmStatus ) )
    return S_OK()
  if key == 'priority':
    if value not in self.__validAlarmPriorities:
      return S_ERROR( "Type %s is invalid. Valid ones are: %s" % ( value, self.__validAlarmPriorities ) )
    return S_OK()
  if key == 'assignee':
    result = self.getUserAsignees( value )
    if not result[ 'OK' ]:
      return result
    if not result[ 'Value' ]:
      return S_ERROR( "%s is not a known assignee" % value )
    return result
  # Unknown fields are accepted unchanged.
  return S_OK()
def newAlarm( self, alarmDef ):
  """ Create a new alarm record

  alarmDef must contain every field in self.__newAlarmMandatoryFields;
  an optional 'alarmKey' gives the alarm a caller-chosen unique key.
  Returns S_OK( alarmId ) or S_ERROR.
  """
  followers = []
  # Validate mandatory fields; the 'assignee' check also resolves the
  # list of users that should follow the alarm.
  for field in self.__newAlarmMandatoryFields:
    if field not in alarmDef:
      return S_ERROR( "Oops. Missing %s" % field )
    result = self.__checkAlarmField( field, alarmDef[ field ] )
    if not result[ 'OK' ]:
      return result
    if field == 'assignee':
      followers = result[ 'Value' ]
  author = alarmDef[ 'author' ]
  if author not in followers:
    followers.append( author )
  sqlFieldsName = []
  sqlFieldsValue = []
  for field in self.__newAlarmMandatoryFields:
    if field == 'notifications':
      # Store the notification channels as a DEncoded {channel: 0/1} dict.
      notifications = {}
      for nType in self.__validAlarmNotifications:
        if nType in alarmDef[ field ]:
          notifications[ nType ] = 1
        else:
          notifications[ nType ] = 0
      val = DEncode.encode( notifications )
    else:
      val = alarmDef[ field ]
    #Add to the list of fields to add
    sqlFieldsName.append( field )
    result = self._escapeString( val )
    if result['OK']:
      sqlFieldsValue.append( result['Value'] )
    else:
      return S_ERROR( 'Failed to escape value %s' % val )
  sqlFieldsName.extend( [ 'CreationTime', 'ModTime' ] )
  sqlFieldsValue.extend( [ 'UTC_TIMESTAMP()', 'UTC_TIMESTAMP()' ] )
  #Get the defined alarmkey and generate a random one if not defined
  if 'alarmKey' in alarmDef:
    result = self._escapeString( alarmDef[ 'alarmKey' ] )
    if result['OK']:
      alarmKey = result['Value']
    else:
      return S_ERROR( 'Failed to escape value %s for key AlarmKey' % val )
    # Caller-chosen keys must be unique across alarms.
    gLogger.info( "Checking there are no alarms with key %s" % alarmKey )
    result = self._query( "SELECT AlarmId FROM `ntf_Alarms` WHERE AlarmKey=%s" % alarmKey )
    if not result[ 'OK' ]:
      return result
    if result[ 'Value' ]:
      return S_ERROR( "Oops, alarm with id %s has the same alarm key!" % result[ 'Value' ][0][0] )
  else:
    # Fall back to a truncated timestamp as a pseudo-unique key.
    alarmKey = str( time.time() )[-31:]
  sqlFieldsName.append( 'AlarmKey' )
  sqlFieldsValue.append( alarmKey )
  sqlInsert = "INSERT INTO `ntf_Alarms` (%s) VALUES (%s)" % ( ",".join( sqlFieldsName ),
                                                              ",".join( sqlFieldsValue ) )
  result = self._update( sqlInsert )
  if not result['OK']:
    return result
  alarmId = result[ 'lastRowId' ]
  # Subscribe followers; failures are logged but do not abort creation.
  for follower in followers:
    result = self.modifyFollowerForAlarm( alarmId, follower, notifications )
    if not result[ 'OK' ]:
      varMsg = "\nFollower: %s\nAlarm: %s\nError: %s" % ( follower, alarmId, result['Message'] )
      self.log.error( "Couldn't set follower for alarm", varMsg )
  self.__notifyAlarm( alarmId )
  return S_OK( alarmId )
def deleteAlarmsByAlarmKey( self, alarmKeyList ):
    """ Resolve each alarm key to its numeric id, then delete those alarms.

    Returns the S_ERROR of the first failed key lookup, otherwise the
    result of deleteAlarmsByAlarmId.
    """
    idsToDelete = []
    for key in alarmKeyList:
        lookup = self.__getAlarmIdFromKey( key )
        if not lookup[ 'OK' ]:
            return lookup
        idsToDelete.append( lookup[ 'Value' ] )
    self.log.info( "Trying to delete alarms with:\n alamKey %s\n alarmId %s" % ( alarmKeyList, idsToDelete ) )
    return self.deleteAlarmsByAlarmId( idsToDelete )
def deleteAlarmsByAlarmId( self, alarmIdList ):
    """ Delete alarms (log rows, follower rows, then the alarm itself) by id.

    :param alarmIdList: a single numeric id or an iterable of numeric ids
    :return: S_OK() (per-table delete failures are logged, not fatal)

    Fix: the bare ``except:`` clauses hid unrelated errors (including
    KeyboardInterrupt); they now catch only conversion failures.
    """
    self.log.info( "Trying to delete alarms with ids %s" % alarmIdList )
    # Accept a single scalar id as well as an iterable of ids
    try:
        alarmIdList = [ int( alarmIdList ) ]
    except ( ValueError, TypeError ):
        pass
    try:
        alarmIdList = [ int( alarmId ) for alarmId in alarmIdList ]
    except ( ValueError, TypeError ):
        self.log.error( "At least one alarmId is not a number", str( alarmIdList ) )
        return S_ERROR( "At least one alarmId is not a number: %s" % str( alarmIdList ) )
    # Dependent rows first, the alarm row last
    tablesToCheck = ( "ntf_AlarmLog", "ntf_AlarmFollowers", "ntf_Alarms" )
    alamsSQLList = ",".join( [ "%d" % alarmId for alarmId in alarmIdList ] )
    for tableName in tablesToCheck:
        delSql = "DELETE FROM `%s` WHERE AlarmId in ( %s )" % ( tableName, alamsSQLList )
        result = self._update( delSql )
        if not result[ 'OK' ]:
            self.log.error( "Could not delete alarm", "from table %s: %s" % ( tableName, result[ 'Message' ] ) )
    return S_OK()
def __processUpdateAlarmModifications( self, modifications ):
    """ Validate and SQL-escape a dict of alarm field modifications.

    :param modifications: dict of field name -> new value
    :return: S_OK( ( "field=value, ..." SQL fragment,
                     DEncoded modifications,
                     followers extracted from an 'assignee' change ) )

    Fix: the py2-only ``type(...) != types.DictType`` check is replaced by
    ``isinstance`` (also accepts dict subclasses, backward-compatible).
    """
    if not isinstance( modifications, dict ):
        return S_ERROR( "Modifications must be a dictionary" )
    updateFields = []
    followers = []
    for field in modifications:
        if field not in self.__updateAlarmModificableFields:
            return S_ERROR( "%s is not a valid modificable field" % field )
        value = modifications[ field ]
        result = self.__checkAlarmField( field, value )
        if not result[ 'OK' ]:
            return result
        if field == 'assignee':
            # Assignee changes imply new followers for the alarm
            followers = result[ 'Value' ]
        result = self._escapeString( modifications[ field ] )
        if not result[ 'OK' ]:
            return result
        updateFields.append( "%s=%s" % ( field, result[ 'Value' ] ) )
    return S_OK( ( ", ".join( updateFields ), DEncode.encode( modifications ), followers ) )
def __getAlarmIdFromKey( self, alarmKey ):
    """ Translate an alarm key into its numeric AlarmId.

    :return: S_OK( alarmId ) or S_ERROR

    Fix: an unknown key used to raise IndexError on the empty query
    result; it now returns an explicit S_ERROR.
    """
    result = self._escapeString( alarmKey )
    if not result[ 'OK' ]:
        return S_ERROR( "Cannot escape alarmKey %s" % alarmKey )
    escapedKey = result[ 'Value' ]
    sqlQuery = "SELECT AlarmId FROM `ntf_Alarms` WHERE AlarmKey=%s" % escapedKey
    result = self._query( sqlQuery )
    if result[ 'OK' ]:
        if not result[ 'Value' ]:
            return S_ERROR( "Alarm key %s is unknown" % alarmKey )
        result[ 'Value' ] = result[ 'Value' ][0][0]
    return result
def updateAlarm( self, updateReq ):
    """ Apply an update request (comment and/or field modifications) to an alarm.

    updateReq is a dict that must identify the alarm (one of
    __updateAlarmIdentificationFields), carry every
    __updateAlarmMandatoryFields entry, and contain at least one of
    __updateAlarmAtLeastOneField. The update is appended to ntf_AlarmLog,
    the alarm row is updated, and followers are notified.

    :return: result of __notifyAlarm( alarmId ) on success, S_ERROR otherwise
    """
    # Discover alarm identification
    idOK = False
    for field in self.__updateAlarmIdentificationFields:
        if field in updateReq:
            idOK = True
    if not idOK:
        return S_ERROR( "Need at least one field to identify which alarm to update! %s" % self.__updateAlarmIdentificationFields )
    if 'alarmKey' in updateReq:
        # Translate the key into the numeric id used below
        alarmKey = updateReq[ 'alarmKey' ]
        result = self.__getAlarmIdFromKey( alarmKey )
        if not result[ 'OK' ]:
            self.log.error( "Could not get alarm id for key", " %s: %s" % ( alarmKey, result[ 'Value' ] ) )
            return result
        updateReq[ 'id' ] = result[ 'Value' ]
        self.log.info( "Retrieving alarm key %s maps to id %s" % ( alarmKey, updateReq[ 'id' ] ) )
    # Check fields
    for field in self.__updateAlarmMandatoryFields:
        if field not in updateReq:
            return S_ERROR( "Oops. Missing %s" % field )
    validReq = False
    for field in self.__updateAlarmAtLeastOneField:
        if field in updateReq:
            validReq = True
    if not validReq:
        # NOTE(review): this "invalid" path returns S_OK, not S_ERROR — confirm callers rely on it
        return S_OK( "Requirement needs at least one of %s" % " ".join( self.__updateAlarmAtLeastOneField ) )
    author = updateReq[ 'author' ]
    followers = [ author ]
    if author not in Registry.getAllUsers():
        return S_ERROR( "%s is not a known user" % author )
    result = self._escapeString( author )
    if not result[ 'OK' ]:
        return result
    author = result[ 'Value' ]
    try:
        alarmId = int( updateReq[ 'id' ] )
    except:
        return S_ERROR( "Oops, Alarm id is not valid! (bad boy...)" )
    # Make sure the alarm actually exists before logging against it
    result = self._query( "SELECT AlarmId FROM `ntf_Alarms` WHERE AlarmId=%d" % alarmId )
    if not result[ 'OK' ]:
        return result
    if not result[ 'Value' ]:
        return S_ERROR( "Alarm %s does not exist!" % alarmId )
    sqlFields = [ 'AlarmId', 'Author', 'Timestamp' ]
    sqlValues = [ "%d" % alarmId, author, 'UTC_TIMESTAMP()' ]
    rawComment = ""
    if 'comment' in updateReq:
        rawComment = updateReq[ 'comment' ]
        result = self._escapeString( rawComment )
        if not result[ 'OK' ]:
            return result
        sqlFields.append( "Comment" )
        sqlValues.append( result[ 'Value' ] )
    modifications = False
    if 'modifications' in updateReq:
        # Validate/escape the field changes and pick up new followers (assignee)
        modifications = updateReq[ 'modifications' ]
        result = self.__processUpdateAlarmModifications( modifications )
        if not result[ 'OK' ]:
            return result
        alarmModsSQL, encodedMods, newFollowers = result[ 'Value' ]
        sqlFields.append( "Modifications" )
        result = self._escapeString( encodedMods )
        if not result[ 'OK' ]:
            return result
        sqlValues.append( result[ 'Value' ] )
        if newFollowers:
            followers.extend( newFollowers )
    # Record the update in the alarm log first ...
    logSQL = "INSERT INTO `ntf_AlarmLog` (%s) VALUES (%s)" % ( ",".join( sqlFields ), ",".join( sqlValues ) )
    result = self._update( logSQL )
    if not result[ 'OK' ]:
        return result
    # ... then apply the modifications to the alarm row itself
    modSQL = "ModTime=UTC_TIMESTAMP()"
    if modifications:
        modSQL = "%s, %s" % ( modSQL, alarmModsSQL )
    updateSQL = "UPDATE `ntf_Alarms` SET %s WHERE AlarmId=%d" % ( modSQL, alarmId )
    result = self._update( updateSQL )
    if not result[ 'OK' ]:
        return result
    # Get notifications config
    sqlQuery = "SELECT Notifications FROM `ntf_Alarms` WHERE AlarmId=%s" % alarmId
    result = self._query( sqlQuery )
    if not result[ 'OK' ] or not result[ 'Value' ]:
        self.log.error( "Could not retrieve default notifications for alarm", "%s" % alarmId )
        return S_OK( alarmId )
    notificationsDict = DEncode.decode( result[ 'Value' ][0][0] )[0]
    # Make sure every known channel has an explicit 0/1 entry
    for v in self.__validAlarmNotifications:
        if v not in notificationsDict:
            notificationsDict[ v ] = 0
    for follower in followers:
        # overwrite=False: do not clobber channel choices of existing followers
        result = self.modifyFollowerForAlarm( alarmId, follower, notificationsDict, overwrite = False )
        if not result[ 'OK' ]:
            varMsg = "\nFollower: %s\nAlarm: %s\nError: %s" % ( follower, alarmId, result['Message'] )
            self.log.error( "Couldn't set follower for alarm", varMsg )
    return self.__notifyAlarm( alarmId )
def __notifyAlarm( self, alarmId ):
    """ Push the current state of an alarm to all subscribed users.

    Panel subscribers get the info plus only the last log entry;
    mail subscribers get the info plus the full log. SMS is unimplemented.
    """
    result = self.getSubscribersForAlarm( alarmId )
    if not result[ 'OK' ]:
        return result
    subscribers = result[ 'Value' ]
    # NOTE(review): needLongText is computed but never used below
    needLongText = False
    if subscribers[ 'mail' ]:
        needLongText = True
    result = self.getAlarmInfo( alarmId )
    if not result[ 'OK' ]:
        return result
    alarmInfo = result[ 'Value' ]
    result = self.getAlarmLog( alarmId )
    if not result[ 'OK' ]:
        return result
    alarmLog = result[ 'Value' ]
    if subscribers[ 'notification' ]:
        msg = self.__generateAlarmInfoMessage( alarmInfo )
        # True -> only the most recent log entry for the compact panel view
        logMsg = self.__generateAlarmLogMessage( alarmLog, True )
        if logMsg:
            msg = "%s\n\n%s\nLast modification:\n%s" % ( msg, "*"*30, logMsg )
        for user in subscribers[ 'notification' ]:
            # Keep the panel notification one day, then fall back to mail
            self.addNotificationForUser( user, msg, 86400, deferToMail = True )
    if subscribers[ 'mail' ]:
        msg = self.__generateAlarmInfoMessage( alarmInfo )
        logMsg = self.__generateAlarmLogMessage( alarmLog )
        if logMsg:
            # A non-empty log means this alarm was updated, not freshly created
            msg = "%s\n\n%s\nAlarm Log:\n%s" % ( msg, "*"*30, logMsg )
            subject = "Update on alarm %s" % alarmId
        else:
            subject = "New alarm %s" % alarmId
        for user in subscribers[ 'mail' ]:
            self.__sendMailToUser( user, subject, msg )
    if subscribers[ 'sms' ]:
        #TODO
        pass
    return S_OK()
def __generateAlarmLogMessage( self, alarmLog, showOnlyLast = False ):
    """ Render alarm log records as a human-readable string (newest first).

    :param alarmLog: dict with 'ParameterNames' and 'Records'
    :param showOnlyLast: when True only the most recent entry is rendered
    :return: rendered string, or "" for an empty log

    Fixes: "Modificaitons" typo in the rendered output; py2-only
    ``keys()``/``sort()`` replaced by ``sorted``.
    """
    records = alarmLog[ 'Records' ]
    if len( records ) == 0:
        return ""
    if showOnlyLast:
        logToShow = [ -1 ]
    else:
        # Newest entries first
        logToShow = range( len( records ) - 1, -1, -1 )
    finalMessage = []
    for iD in logToShow:
        rec = records[ iD ]
        # Keep only the non-empty columns of this record
        data = {}
        for i in range( len( alarmLog[ 'ParameterNames' ] ) ):
            if rec[i]:
                data[ alarmLog[ 'ParameterNames' ][i] ] = rec[i]
        # Columns: [ 'timestamp', 'author', 'comment', 'modifications' ]
        msg = [ " Entry by : %s" % data[ 'author' ] ]
        msg.append( " On : %s" % data[ 'timestamp' ].strftime( "%Y/%m/%d %H:%M:%S" ) )
        if 'modifications' in data:
            mods = data[ 'modifications' ]
            msg.append( " Modifications:" )
            for key in sorted( mods ):
                msg.append( " %s -> %s" % ( key, mods[ key ] ) )
        if 'comment' in data:
            msg.append( " Comment:\n\n%s" % data[ 'comment' ] )
        finalMessage.append( "\n".join( msg ) )
    return "\n\n===============\n".join( finalMessage )
def __generateAlarmInfoMessage( self, alarmInfo ):
    """ Render the static alarm fields as a human-readable summary.

    :param alarmInfo: dict as returned by getAlarmInfo (keys include
        'alarmid', 'author', 'creationtime', 'modtime', 'subject',
        'status', 'priority', 'body', 'assignee')
    :return: formatted multi-line string

    Fix: "Last modificaiton" typo in the rendered output.
    """
    msg = " Alarm %6d\n" % alarmInfo[ 'alarmid' ]
    msg += " Author : %s\n" % alarmInfo[ 'author' ]
    msg += " Subject : %s\n" % alarmInfo[ 'subject' ]
    msg += " Status : %s\n" % alarmInfo[ 'status' ]
    msg += " Priority : %s\n" % alarmInfo[ 'priority' ]
    msg += " Assignee : %s\n" % alarmInfo[ 'assignee' ]
    msg += " Creation date : %s UTC\n" % alarmInfo[ 'creationtime' ].strftime( "%Y/%m/%d %H:%M:%S" )
    msg += " Last modification : %s UTC\n" % alarmInfo[ 'modtime' ].strftime( "%Y/%m/%d %H:%M:%S" )
    msg += " Body:\n\n%s" % alarmInfo[ 'body' ]
    return msg
def __sendMailToUser( self, user, subject, message ):
    """ Mail *message* to *user* at the address registered in the CS. """
    address = gConfig.getValue( "/Registry/Users/%s/Email" % user, "" )
    if not address:
        self.log.error( "User does not have an email registered", user )
        return S_ERROR( "User %s does not have an email registered" % user )
    self.log.info( "Sending mail (%s) to user %s at %s" % ( subject, user, address ) )
    mailObj = Mail()
    mailObj._subject = "[DIRAC] %s" % subject
    mailObj._message = message
    mailObj._mailAddress = address
    sendResult = mailObj._send()
    if not sendResult[ 'OK' ]:
        gLogger.warn( 'Could not send mail with the following message:\n%s' % sendResult[ 'Message' ] )
    return sendResult
def getAlarms( self, condDict = None, sortList = False, start = 0, limit = 0, modifiedAfter = None ):
    """ Query alarms matching the given conditions.

    :param condDict: field -> list-of-values filter (fields limited to
        __alarmQueryFields); 'assignee' values are expanded to all
        assignee groups of each user
    :param sortList: list of ( field, order ) pairs
    :param start, limit: SQL LIMIT window
    :param modifiedAfter: datetime; only alarms modified at/after it
    :return: S_OK( { 'ParameterNames': ..., 'Records': ... } )

    Fixes: the modifiedAfter datetime was interpolated unquoted, yielding
    invalid SQL; condDict used a mutable default argument.
    """
    if condDict is None:
        condDict = {}
    condSQL = []
    for field in self.__alarmQueryFields:
        if field in condDict:
            fieldValues = []
            rawValue = condDict[ field ]
            if field == 'assignee':
                # Expand each user into every assignee group it belongs to
                expandedValue = []
                for user in rawValue:
                    result = self.getAssigneeGroupsForUser( user )
                    if not result[ 'OK' ]:
                        return result
                    for ag in result[ 'Value' ]:
                        if ag not in expandedValue:
                            expandedValue.append( ag )
                rawValue = expandedValue
            for value in rawValue:
                result = self._escapeString( value )
                if not result[ 'OK' ]:
                    return result
                fieldValues.append( result[ 'Value' ] )
            condSQL.append( "%s in ( %s )" % ( field, ",".join( fieldValues ) ) )
    selSQL = "SELECT %s FROM `ntf_Alarms`" % ",".join( self.__alarmQueryFields )
    if modifiedAfter:
        # Quote the datetime literal, otherwise MySQL receives a bare token stream
        condSQL.append( "ModTime >= '%s'" % modifiedAfter.strftime( "%Y-%m-%d %H:%M:%S" ) )
    if condSQL:
        selSQL = "%s WHERE %s" % ( selSQL, " AND ".join( condSQL ) )
    if sortList:
        selSQL += " ORDER BY %s" % ", ".join( [ "%s %s" % ( sort[0], sort[1] ) for sort in sortList ] )
    if limit:
        selSQL += " LIMIT %d,%d" % ( start, limit )
    result = self._query( selSQL )
    if not result['OK']:
        return result
    resultDict = {}
    resultDict['ParameterNames'] = self.__alarmQueryFields
    resultDict['Records'] = [ list( v ) for v in result['Value'] ]
    return S_OK( resultDict )
def getAlarmInfo( self, alarmId ):
    """ Return the fields of one alarm as a dict (empty dict if not found). """
    result = self.getAlarms( { 'alarmId' : alarmId } )
    if not result[ 'OK' ]:
        return result
    data = result[ 'Value' ]
    if not data[ 'Records' ]:
        return S_OK( {} )
    # Pair each column name with the first (only) record's values
    alarmInfo = dict( zip( data[ 'ParameterNames' ], data[ 'Records' ][0] ) )
    return S_OK( alarmInfo )
def getAlarmLog( self, alarmId ):
    """ Return the update log of an alarm, decoding the Modifications column.

    :return: S_OK( { 'ParameterNames': __alarmLogFields, 'Records': rows } )

    Fixes: rows with an empty Modifications column were appended twice
    (append at loop top plus a second append in the empty branch); bare
    ``except`` narrowed.
    """
    try:
        alarmId = int( alarmId )
    except ( ValueError, TypeError ):
        return S_ERROR( "Alarm id must be a non decimal number" )
    sqlSel = "SELECT %s FROM `ntf_AlarmLog` WHERE AlarmId=%d ORDER BY Timestamp ASC" % ( ",".join( self.__alarmLogFields ),
                                                                                         alarmId )
    result = self._query( sqlSel )
    if not result[ 'OK' ]:
        return result
    decodedRows = []
    for row in result[ 'Value' ]:
        decodedRow = list( row )
        # Column 3 holds the DEncoded modifications dict; may be empty
        if row[3]:
            decodedRow[3] = DEncode.decode( row[3] )[0]
        decodedRows.append( decodedRow )
    resultDict = {}
    resultDict['ParameterNames'] = self.__alarmLogFields
    resultDict['Records'] = decodedRows
    return S_OK( resultDict )
###
# Followers management
###
def modifyFollowerForAlarm( self, alarmId, user, notificationsDict, overwrite = True ):
    """ Insert, update or delete the follower row of *user* for *alarmId*.

    :param notificationsDict: channel name -> truthy/falsy (channels are
        __validAlarmNotifications); all-false removes the follower
    :param overwrite: when False an existing follower row is left untouched
    :return: S_OK() / DB result

    Fix: the UPDATE statement interpolated the Python list ``modSQL``
    directly (producing ``SET ['Mail=1', ...]``); it is now comma-joined.
    """
    rawUser = user
    # Silently ignore unknown users
    if rawUser not in Registry.getAllUsers():
        return S_OK()
    result = self._escapeString( user )
    if not result[ 'OK' ]:
        return result
    user = result[ 'Value' ]
    # A user is a subscriber if at least one channel is enabled
    subscriber = False
    for k in notificationsDict:
        if notificationsDict[ k ]:
            subscriber = True
            break
    selSQL = "SELECT Notification, Mail, SMS FROM `ntf_AlarmFollowers` WHERE AlarmId=%d AND User=%s" % ( alarmId, user )
    result = self._query( selSQL )
    if not result[ 'OK' ]:
        return result
    if not result[ 'Value' ]:
        if not subscriber:
            # Nothing stored and nothing to store
            return S_OK()
        sqlValues = [ "%d" % alarmId, user ]
        for k in self.__validAlarmNotifications:
            if notificationsDict[ k ]:
                sqlValues.append( "1" )
            else:
                sqlValues.append( "0" )
        inSQL = "INSERT INTO `ntf_AlarmFollowers` ( AlarmId, User, Notification, Mail, SMS ) VALUES (%s)" % ",".join( sqlValues )
        return self._update( inSQL )
    sqlCond = "AlarmId=%d AND User=%s" % ( alarmId, user )
    # Need to delete
    if not subscriber:
        return self._update( "DELETE FROM `ntf_AlarmFollowers` WHERE %s" % sqlCond )
    if not overwrite:
        return S_OK()
    # Need to update
    modSQL = []
    for k in self.__validAlarmNotifications:
        if notificationsDict[ k ]:
            modSQL.append( "%s=1" % k )
        else:
            modSQL.append( "%s=0" % k )
    return self._update( "UPDATE `ntf_AlarmFollowers` SET %s WHERE %s" % ( ", ".join( modSQL ), sqlCond ) )
def getSubscribersForAlarm( self, alarmId ):
    """ Return the followers of an alarm grouped by channel.

    :return: S_OK( { 'mail': [...], 'notification': [...], 'sms': [...] } )
    """
    selSQL = "SELECT User, Mail, Notification, SMS FROM `ntf_AlarmFollowers` WHERE AlarmId=%d" % alarmId
    result = self._query( selSQL )
    if not result[ 'OK' ]:
        return result
    followWays = { 'mail' : [], 'notification' : [], 'sms' : [] }
    seenUsers = []
    for userName, wantsMail, wantsNotification, wantsSMS in result[ 'Value' ]:
        # Only the first row per user counts
        if userName in seenUsers:
            continue
        seenUsers.append( userName )
        if wantsMail:
            followWays[ 'mail' ].append( userName )
        if wantsNotification:
            followWays[ 'notification' ].append( userName )
        if wantsSMS:
            followWays[ 'sms' ].append( userName )
    return S_OK( followWays )
###
# Assignee groups management
###
def getUserAsignees( self, assignee ):
    """ Expand an assignee (user name or group name) into a list of users. """
    # A plain user name maps to itself
    if assignee in Registry.getAllUsers():
        return S_OK( [ assignee ] )
    result = self._escapeString( assignee )
    if not result[ 'OK' ]:
        return result
    escAG = result[ 'Value' ]
    result = self._query( "SELECT User FROM `ntf_AssigneeGroups` WHERE AssigneeGroup = %s" % escAG )
    if not result[ 'OK' ]:
        return result
    # Unknown group simply yields an empty list
    return S_OK( [ row[0] for row in result[ 'Value' ] ] )
def setAssigneeGroup( self, groupName, usersList ):
    """ Make the membership of *groupName* exactly *usersList* (unknown users skipped).

    :return: S_OK() or S_ERROR (a group may never end up empty)

    Fix: the DELETE statement was missing the AssigneeGroup condition, so
    removing a user from this group removed them from EVERY group.
    """
    validUsers = Registry.getAllUsers()
    result = self._escapeString( groupName )
    if not result[ 'OK' ]:
        return result
    escGroup = result[ 'Value' ]
    sqlSel = "SELECT User FROM `ntf_AssigneeGroups` WHERE AssigneeGroup = %s" % escGroup
    result = self._query( sqlSel )
    if not result[ 'OK' ]:
        return result
    currentUsers = [ row[0] for row in result[ 'Value' ] ]
    usersToDelete = []
    usersToAdd = []
    finalUsersInGroup = len( currentUsers )
    # Members no longer requested
    for user in currentUsers:
        if user not in usersList:
            result = self._escapeString( user )
            if not result[ 'OK' ]:
                return result
            usersToDelete.append( result[ 'Value' ] )
            finalUsersInGroup -= 1
    # Requested members not yet present (unknown users are ignored)
    for user in usersList:
        if user not in validUsers:
            continue
        if user not in currentUsers:
            result = self._escapeString( user )
            if not result[ 'OK' ]:
                return result
            usersToAdd.append( "( %s, %s )" % ( escGroup, result[ 'Value' ] ) )
            finalUsersInGroup += 1
    if not finalUsersInGroup:
        return S_ERROR( "Group must have at least one user!" )
    # Delete old users — scoped to THIS group only
    if usersToDelete:
        sqlDel = "DELETE FROM `ntf_AssigneeGroups` WHERE AssigneeGroup = %s AND User in ( %s )" % ( escGroup,
                                                                                                    ",".join( usersToDelete ) )
        result = self._update( sqlDel )
        if not result[ 'OK' ]:
            return result
    # Add new users
    if usersToAdd:
        sqlInsert = "INSERT INTO `ntf_AssigneeGroups` ( AssigneeGroup, User ) VALUES %s" % ",".join( usersToAdd )
        result = self._update( sqlInsert )
        if not result[ 'OK' ]:
            return result
    return S_OK()
def deleteAssigneeGroup( self, groupName ):
    """ Delete an assignee group, refusing while alarms are still assigned to it. """
    result = self._escapeString( groupName )
    if not result[ 'OK' ]:
        return result
    escGroup = result[ 'Value' ]
    result = self._query( "SELECT AlarmId FROM `ntf_Alarms` WHERE Assignee=%s" % escGroup )
    if not result[ 'OK' ]:
        return result
    assignedAlarms = [ row[0] for row in result[ 'Value' ] ]
    if assignedAlarms:
        return S_ERROR( "There are %s alarms assigned to this group" % len( assignedAlarms ) )
    return self._update( "DELETE FROM `ntf_AssigneeGroups` WHERE AssigneeGroup=%s" % escGroup )
def getAssigneeGroups( self ):
    """ Return a dict mapping each assignee group to its (user-sorted) member list. """
    result = self._query( "SELECT AssigneeGroup, User from `ntf_AssigneeGroups` ORDER BY User" )
    if not result[ 'OK' ]:
        return result
    agDict = {}
    for groupName, userName in result[ 'Value' ]:
        agDict.setdefault( groupName, [] ).append( userName )
    return S_OK( agDict )
def getAssigneeGroupsForUser( self, user ):
    """ Return the list of assignee groups *user* belongs to. """
    if user not in Registry.getAllUsers():
        return S_ERROR( "%s is an unknown user" % user )
    escaped = self._escapeString( user )
    if not escaped[ 'OK' ]:
        return escaped
    result = self._query( "SELECT AssigneeGroup from `ntf_AssigneeGroups` WHERE User=%s" % escaped[ 'Value' ] )
    if not result[ 'OK' ]:
        return result
    return S_OK( [ row[0] for row in result[ 'Value' ] ] )
###
# Notifications
###
def addNotificationForUser( self, user, message, lifetime = 0, deferToMail = 1 ):
    """ Insert a panel notification row for *user*.

    :param lifetime: seconds until expiration (0 = no expiration column)
    :param deferToMail: falsy disables the mail fallback on expiry
    :return: S_OK( lastRowId )
    """
    if user not in Registry.getAllUsers():
        return S_ERROR( "%s is an unknown user" % user )
    self.log.info( "Adding a notification for user %s (msg is %s chars)" % ( user, len( message ) ) )
    escUser = self._escapeString( user )
    if not escUser[ 'OK' ]:
        return escUser
    escMsg = self._escapeString( message )
    if not escMsg[ 'OK' ]:
        return escMsg
    sqlFields = [ 'User', 'Message', 'Timestamp' ]
    sqlValues = [ escUser[ 'Value' ], escMsg[ 'Value' ], 'UTC_TIMESTAMP()' ]
    if not deferToMail:
        sqlFields.append( "DeferToMail" )
        sqlValues.append( "0" )
    if lifetime:
        sqlFields.append( "Expiration" )
        sqlValues.append( "TIMESTAMPADD( SECOND, %d, UTC_TIMESTAMP() )" % int( lifetime ) )
    sqlInsert = "INSERT INTO `ntf_Notifications` (%s) VALUES (%s) " % ( ",".join( sqlFields ),
                                                                        ",".join( sqlValues ) )
    result = self._update( sqlInsert )
    if not result[ 'OK' ]:
        return result
    return S_OK( result[ 'lastRowId' ] )
def removeNotificationsForUser( self, user, msgIds = False ):
    """ Delete notifications of *user*, optionally restricted to ids in *msgIds*. """
    if user not in Registry.getAllUsers():
        return S_ERROR( "%s is an unknown user" % user )
    result = self._escapeString( user )
    if not result[ 'OK' ]:
        return result
    delSQL = "DELETE FROM `ntf_Notifications` WHERE User=%s" % result[ 'Value' ]
    if msgIds:
        escapedIDs = []
        for notifId in msgIds:
            escaped = self._escapeString( str( notifId ) )
            if not escaped[ 'OK' ]:
                return escaped
            escapedIDs.append( escaped[ 'Value' ] )
        delSQL = "%s AND Id in ( %s ) " % ( delSQL, ",".join( escapedIDs ) )
    return self._update( delSQL )
def markNotificationsSeen( self, user, seen = True, msgIds = False ):
    """ Set the Seen flag on *user*'s notifications (optionally only *msgIds*). """
    if user not in Registry.getAllUsers():
        return S_ERROR( "%s is an unknown user" % user )
    result = self._escapeString( user )
    if not result[ 'OK' ]:
        return result
    seenValue = 1 if seen else 0
    updateSQL = "UPDATE `ntf_Notifications` SET Seen=%d WHERE User=%s" % ( seenValue, result[ 'Value' ] )
    if msgIds:
        escapedIDs = []
        for notifId in msgIds:
            escaped = self._escapeString( str( notifId ) )
            if not escaped[ 'OK' ]:
                return escaped
            escapedIDs.append( escaped[ 'Value' ] )
        updateSQL = "%s AND Id in ( %s ) " % ( updateSQL, ",".join( escapedIDs ) )
    return self._update( updateSQL )
def getNotifications( self, condDict = None, sortList = False, start = 0, limit = 0 ):
    """ Query notifications matching the given conditions.

    :param condDict: field -> list-of-values filter
    :param sortList: list of ( field, 'asc'|'desc' ) pairs
    :param start, limit: SQL LIMIT window
    :return: S_OK( { 'ParameterNames': ..., 'Records': ... } )

    Fixes: the default ``sortList = False`` was iterated unconditionally,
    raising TypeError on every default call; condDict used a mutable
    default argument.
    """
    if condDict is None:
        condDict = {}
    condSQL = []
    for field in self.__notificationQueryFields:
        if field in condDict:
            fieldValues = []
            for value in condDict[ field ]:
                result = self._escapeString( value )
                if not result[ 'OK' ]:
                    return result
                fieldValues.append( result[ 'Value' ] )
            condSQL.append( "%s in ( %s )" % ( field, ",".join( fieldValues ) ) )
    eSortList = []
    if sortList:
        for field, order in sortList:
            # Only allow sane sort directions, backtick-quote the field name
            if order.lower() in [ 'asc', 'desc' ]:
                eSortList.append( ( '`%s`' % field.replace( '`', '' ), order ) )
    selSQL = "SELECT %s FROM `ntf_Notifications`" % ",".join( self.__notificationQueryFields )
    if condSQL:
        selSQL = "%s WHERE %s" % ( selSQL, " AND ".join( condSQL ) )
    if eSortList:
        selSQL += " ORDER BY %s" % ", ".join( [ "%s %s" % ( sort[0], sort[1] ) for sort in eSortList ] )
    else:
        selSQL += " ORDER BY Id DESC"
    if limit:
        selSQL += " LIMIT %d,%d" % ( start, limit )
    result = self._query( selSQL )
    if not result['OK']:
        return result
    resultDict = {}
    resultDict['ParameterNames'] = self.__notificationQueryFields
    resultDict['Records'] = [ list( v ) for v in result['Value'] ]
    return S_OK( resultDict )
def purgeExpiredNotifications( self ):
    """ Delete expired notifications, first mailing out the ones flagged DeferToMail.

    Two passes: (1) expired notifications that are already seen or not
    deferrable are deleted outright; (2) expired, unseen, deferrable ones
    are forwarded by mail and then deleted.
    """
    self.log.info( "Purging expired notifications" )
    # Pass 1: expired AND (seen OR not deferrable) -> plain delete
    delConds = [ '(Seen=1 OR DeferToMail=0)', '(TIMESTAMPDIFF( SECOND, UTC_TIMESTAMP(), Expiration ) < 0 )' ]
    delSQL = "DELETE FROM `ntf_Notifications` WHERE %s" % " AND ".join( delConds )
    result = self._update( delSQL )
    if not result[ 'OK' ]:
        return result
    self.log.info( "Purged %s notifications" % result[ 'Value' ] )
    # Pass 2: expired, unseen, deferrable -> mail then delete
    deferCond = [ 'Seen=0', 'DeferToMail=1', 'TIMESTAMPDIFF( SECOND, UTC_TIMESTAMP(), Expiration ) < 0' ]
    selSQL = "SELECT Id, User, Message FROM `ntf_Notifications` WHERE %s" % " AND ".join( deferCond )
    result = self._query( selSQL )
    if not result[ 'OK' ]:
        return result
    messages = result[ 'Value' ]
    if not messages:
        return S_OK()
    ids = []
    for msg in messages:
        # msg = ( Id, User, Message ); mail failures are ignored here
        self.__sendMailToUser( msg[1], 'Notification defered to mail', msg[2] )
        ids.append( str( msg[0] ) )
    self.log.info( "Deferred %s notifications" % len( ids ) )
    return self._update( "DELETE FROM `ntf_Notifications` WHERE Id in (%s)" % ",".join( ids ) )
|
andresailer/DIRAC
|
FrameworkSystem/DB/NotificationDB.py
|
Python
|
gpl-3.0
| 34,270
|
[
"DIRAC"
] |
ebf8994f822fccf19867b3c5abdb0afac49cf58bb0097b841218a4037552db61
|
import numpy as np
import matplotlib.pyplot as plt
import fitsio
import h5py
from K2pgram import K2pgram, eval_freq, K2pgram_basis
from gatspy.periodic import LombScargle
import fitsio
import glob
import emcee
import scipy.interpolate as spi
import scipy.signal as sps
# Matplotlib rc overrides shared by every figure in this module.
# NOTE(review): 'text.fontsize' was renamed 'font.size' in modern
# matplotlib -- confirm against the pinned matplotlib version.
plotpar = {'axes.labelsize': 10,
           'text.fontsize': 8,
           'legend.fontsize': 10,
           'xtick.labelsize': 10,
           'ytick.labelsize': 10,
           'text.usetex': True}
plt.rcParams.update(plotpar)
def smoothing(x, y):
    """Interpolate (x, y) onto a fine grid and smooth with a Gaussian kernel.

    Parameters: x, y -- 1-d arrays of equal length (x must be sortable by
    interp1d). Returns (xx, smoothed): a 1000-point linear grid spanning x
    and the Gaussian-smoothed interpolated values.

    Fix: scipy removed ``scipy.signal.gaussian`` in 1.13; it now lives in
    ``scipy.signal.windows.gaussian``. Fall back for old scipy versions.
    """
    interp = spi.interp1d(x, y)
    xx = np.linspace(min(x), max(x), 1000)
    yy = interp(xx)
    # 20-sample Gaussian window, sigma=8, normalised to unit sum
    try:
        window = sps.windows.gaussian(20, 8)
    except AttributeError:  # scipy < 1.1 exposed it at top level only
        window = sps.gaussian(20, 8)
    smoothed = sps.convolve(yy, window / window.sum(), mode='same')
    return xx, smoothed
def peak_detect(x, y):
    """Locate strict local maxima of y.

    Returns (x_peaks, y_peaks, x_best, y_best) where the "best" arrays
    contain the position/height of the highest peak(s).

    Fix: inputs with no interior peak used to crash (empty float index
    array); they now return four empty arrays.
    """
    interior = np.arange(1, len(x) - 1)
    # Strictly greater than both neighbours
    mask = (y[interior] > y[interior - 1]) & (y[interior] > y[interior + 1])
    peaks = interior[mask]
    if peaks.size == 0:
        empty = np.array([])
        return empty, empty, empty, empty
    best = y[peaks] == max(y[peaks])
    return x[peaks], y[peaks], x[peaks][best], y[peaks][best]
def find_modes(fname, eid, nbasis=150, campaign=1, raw=False):
    """Compute and plot the K2pgram periodogram of one K2 light curve.

    fname: K2 pixel/flux FITS file; eid: EPIC id used in output names;
    nbasis: number of basis vectors to use; campaign: K2 campaign (selects
    the basis file); raw: also plot a plain Lomb-Scargle periodogram.
    Writes plots and a text periodogram under astero/.

    Fixes: in the ``raw`` branch ``fs`` was used before being defined
    (NameError); the ``nbasis`` parameter was ignored (basis count was
    hard-coded to 150 -- the default keeps old behaviour).
    """
    data = fitsio.read(fname)
    aps = fitsio.read(fname, 2)
    # Use the aperture with the lowest 6-hour CDPP
    y = data["flux"][:, np.argmin(aps["cdpp6"])]
    x = data["time"]
    q = data["quality"]
    # Keep finite, quality-flag-free cadences
    l = np.isfinite(y) * np.isfinite(x) * (q == 0)
    y, x = y[l], x[l]
    y /= np.median(y)
    y -= 1
    x *= 24*3600  # convert to seconds

    # Frequency grid in Hz (10-300 uHz); defined BEFORE the raw plot,
    # which previously referenced it ahead of its definition
    fs = np.arange(10, 300, 4e-2) * 1e-6

    # plot raw data
    if raw == True:
        plt.clf()
        model = LombScargle().fit(x, y, np.ones_like(y)*1e-5)
        period = 1. / fs
        raw_pgram = model.periodogram(period)
        plt.plot(fs, raw_pgram, "k")
        plt.savefig("astero/raw_%spgram" % eid)

    # load basis
    with h5py.File("data/c%s.h5" % campaign, "r") as f:
        basis = f["basis"][:nbasis, l]

    amps2, s2n, w = K2pgram(x, y, basis, fs)

    # plot our pgram (frequencies converted to uHz for display)
    plt.clf()
    fs *= 1e6
    plt.plot(fs, s2n, "k")
    plt.xlabel("$\mathrm{Frequency~(}\mu \mathrm{Hz)}$")
    plt.ylabel("$\mathrm{Power}$")
    plt.savefig("astero/%sastero_pgram" % eid)

    # save pgram
    np.savetxt("astero/%sastero_pgram.txt" % eid, np.transpose((fs, s2n)))
def find_delta_nu(fs, s2n, eid, width, sub=1, truths=None, smooth=False):
    """Estimate the large frequency separation (delta nu) from a periodogram.

    fs, s2n: frequency grid (Hz) and periodogram power; width: window size
    as a fraction of the grid; sub: subsampling stride for the window
    positions; truths: optional (dnu, nu_max) reference values to overplot;
    smooth: smooth the collapsed ACF before peak detection.
    Saves a 3-panel diagnostic plot and returns
    (dnu, peak height, lags, positions, collapsed ACF, collapsed positions).

    Fixes: a leftover ``assert 0`` (plus debug prints) made smooth=True
    unusable; truth markers were drawn even when ``truths`` was None
    (NameError); the window size is cast to int for modern numpy.
    """
    if truths:
        dnu, nm = truths
    fps = int(width * len(fs))  # window length in frequency bins
    df = (fs[1] - fs[0])  # frequency lag in uHz
    pos = np.arange(len(fs)-fps)[::sub]  # the position of each section
    # One autocorrelation function per window position
    acor = np.zeros((fps, len(pos)))
    for i in range(len(pos)):
        acor[:, i] = emcee.autocorr.function(s2n[i*sub:fps+(i*sub)])
    lags = np.arange(fps)*df

    plt.clf()
    plt.subplot(3, 1, 1)
    if truths:
        plt.axvline(nm, color="r", linestyle="--")
    plt.plot(fs*1e6, s2n, "k")
    plt.xlim(min(fs*1e6), max(fs*1e6))
    plt.xlabel("$\mathrm{Frequency~(}\mu \mathrm{Hz)}$")
    plt.ylabel("$\mathrm{Power}$")
    plt.yticks(visible=False)

    plt.subplot(3, 1, 2)
    plt.imshow(acor, cmap="gray_r", interpolation="nearest",
               aspect="auto", vmin=0, vmax=.3)
    plt.subplots_adjust(hspace=.35)
    plt.xticks(visible=False)
    plt.yticks(visible=False)
    plt.ylabel("$\Delta \\nu$")
    plt.xlabel("$\\nu_{max}~\mathrm{location}$")

    collapsed_acf = np.sum(acor, axis=1)
    # cut off first part of the acf (dnu won't be smaller than 8)
    l = lags*1e6 > 8
    lags, collapsed_acf = lags[l], collapsed_acf[l]

    plt.subplot(3, 1, 3)
    if len(lags) != len(collapsed_acf):
        lags = lags[:-1]
    plt.ylim(min(collapsed_acf), max(collapsed_acf))
    plt.xlabel("$\Delta \\nu~\mathrm{(}\mu\mathrm{Hz)}$")
    plt.ylabel("$\mathrm{Correlation}$")

    if smooth == True:
        # Smooth the collapsed ACF before hunting for the dnu peak
        smoothx, smoothy = smoothing(lags, collapsed_acf)
        x_peaks, y_peaks, mx, my = peak_detect(smoothx, smoothy)
        plt.plot(smoothx*1e6, smoothy)
    else:
        x_peaks, y_peaks, mx, my = peak_detect(lags, collapsed_acf)
        plt.plot(lags*1e6, collapsed_acf, "k")
    plt.axvline(mx*1e6, color="k", alpha=.3, linestyle="--",
                label="$%.2f~\mu\mathrm{Hz}$" % (mx[0]*1e6))
    if truths:
        plt.axvline(dnu, color="r", linestyle="--")
    plt.legend()
    plt.savefig("astero/%s_dnu" % eid)
    return mx[0], my[0], lags, pos, collapsed_acf, np.sum(acor, axis=0)
|
RuthAngus/K2rotation
|
find_delta_nu.py
|
Python
|
mit
| 4,393
|
[
"Gaussian"
] |
40f02bc461403dfc26a16a0b5d77feca6d38f9ea323931d6915bfcb8bede8fea
|
"""
This module provides a way to pass information between passes as metadata.
* add attaches a metadata to a node
* get retrieves all metadata from a particular class attached to a node
"""
from gast import AST # so that metadata are walkable as regular ast nodes
class Metadata(AST):
    """ Base class to add information on a node to improve code generation. """

    def __init__(self):
        """ Initialize content of these metadata. """
        self.data = list()
        # Listing 'data' in _fields makes the metadata walkable like a
        # regular ast node by generic AST visitors.
        self._fields = ('data',)
        super(Metadata, self).__init__()

    def __iter__(self):
        """ Enable iteration over every metadata information. """
        return iter(self.data)

    def append(self, data):
        """ Add a metadata information. """
        self.data.append(data)
class Lazy(AST):
    """ Metadata to mark variable which doesn't need to be evaluated now. """


class Comprehension(AST):
    """ Metadata recording the target variable of a comprehension. """

    def __init__(self, *args):  # no positional argument to be deep copyable
        super(Comprehension, self).__init__()
        if args:
            # First (optional) argument is the comprehension target
            self.target = args[0]


class StaticReturn(AST):
    """ Metadata to mark return with a constant value. """


class Local(AST):
    """ Metadata to mark function as non exported. """
def add(node, data):
    """Attach *data* as metadata on *node*, creating the container on first use."""
    try:
        holder = node.metadata
    except AttributeError:
        # First metadata on this node: install the container and expose it
        # through _fields so AST walkers can reach it.
        holder = node.metadata = Metadata()
        node._fields += ('metadata',)
    holder.append(data)
def get(node, class_):
    """Return all metadata of type *class_* attached to *node* ([] if none)."""
    if not hasattr(node, 'metadata'):
        return []
    return [entry for entry in node.metadata if isinstance(entry, class_)]
def clear(node, class_):
    """Drop all metadata of type *class_* from *node*.

    If that empties the container, remove the container itself and pop
    the trailing 'metadata' entry from the node's _fields.
    """
    if not hasattr(node, 'metadata'):
        return
    remaining = [entry for entry in node.metadata
                 if not isinstance(entry, class_)]
    node.metadata.data = remaining
    if remaining:
        return
    del node.metadata
    assert node._fields[-1] == 'metadata'
    node._fields = node._fields[:-1]
def visit(self, node):
    """Visitor helper: descend into a node's attached metadata, if any.

    NOTE(review): defined as a free function taking ``self`` -- it is
    presumably grafted onto a NodeVisitor-style class elsewhere; confirm.
    """
    if hasattr(node, 'metadata'):
        self.visit(node.metadata)
|
pombredanne/pythran
|
pythran/metadata.py
|
Python
|
bsd-3-clause
| 1,989
|
[
"VisIt"
] |
67ce0b82d228f51be21ccb087dbbe85d1b51c152c8a81bd3fc3b8bf5a512fccb
|
from apiclient import discovery
from apiclient import model
import json
import Image
import os
# Read the developer key from a dotfile in the user's home directory.
api_key = open(os.environ['HOME'] + "/.freebase_api_key").read()
# Clearing alt_param stops the client from requesting/parsing JSON,
# so the raw image bytes come back untouched.
model.JsonModel.alt_param = ""
freebase = discovery.build('freebase', 'v1sandbox', developerKey=api_key)
# NOTE(review): freebase.image(...).execute() is assumed to return a
# file-like object / path acceptable to PIL -- confirm for this apiclient version.
response = freebase.image(id='/en/espresso').execute()
im = Image.open(response)
im.save('image.jpg', "JPEG")
|
tfmorris/freebase-python-samples
|
client-library/image.py
|
Python
|
bsd-3-clause
| 379
|
[
"ESPResSo"
] |
2f4d5fec3b3ea132c0aa74ecc9034e92e87fe7dd341b18bdc60ea7d03d97cae8
|
import operator
from six import ensure_text, iteritems, iterkeys, text_type
from ..node import NodeVisitor, DataNode, ConditionalNode, KeyValueNode, ListNode, ValueNode, BinaryExpressionNode, VariableNode
from ..parser import parse
class ConditionalValue(object):
    """One (possibly conditional) value inside a manifest key's value list.

    Wraps either a ConditionalNode (condition + value) or a bare
    ValueNode/ListNode, together with a compiled condition function that
    decides, given run_info, whether this value applies.
    """

    def __init__(self, node, condition_func):
        # node: ConditionalNode, ValueNode or ListNode from the wptmanifest AST
        # condition_func: callable(run_info) -> bool
        self.node = node
        assert callable(condition_func)
        self.condition_func = condition_func
        if isinstance(node, ConditionalNode):
            # children[0] is the condition expression, children[1] the value
            assert len(node.children) == 2
            self.condition_node = self.node.children[0]
            assert isinstance(node.children[1], (ValueNode, ListNode))
            self.value_node = self.node.children[1]
        else:
            assert isinstance(node, (ValueNode, ListNode))
            self.condition_node = None
            self.value_node = self.node

    @property
    def value(self):
        # Scalar nodes hold their data directly; list nodes hold child values
        if isinstance(self.value_node, ValueNode):
            return self.value_node.data
        else:
            return [item.data for item in self.value_node.children]

    @value.setter
    def value(self, value):
        if isinstance(self.value_node, ValueNode):
            self.value_node.data = value
        else:
            assert(isinstance(self.value_node, ListNode))
            # Replace the list contents wholesale
            while self.value_node.children:
                self.value_node.children[0].remove()
            assert len(self.value_node.children) == 0
            for list_value in value:
                self.value_node.append(ValueNode(list_value))

    def __call__(self, run_info):
        # True if this value applies for the given run_info
        return self.condition_func(run_info)

    def set_value(self, value):
        # Values are stored as text in the manifest AST
        self.value = ensure_text(value)

    def value_as(self, type_func):
        """Get value and convert to a given type.
        This is unfortunate, but we don't currently have a good way to specify that
        specific properties should have their data returned as specific types"""
        value = self.value
        if type_func is not None:
            value = type_func(value)
        return value

    def remove(self):
        # Removing the last conditional value also removes the parent key node
        if len(self.node.parent.children) == 1:
            self.node.parent.remove()
        self.node.remove()

    @property
    def variables(self):
        """Set of variable names referenced by this value's condition."""
        rv = set()
        if self.condition_node is None:
            return rv
        # Iterative DFS over the condition expression tree
        stack = [self.condition_node]
        while stack:
            node = stack.pop()
            if isinstance(node, VariableNode):
                rv.add(node.data)
            for child in reversed(node.children):
                stack.append(child)
        return rv
class Compiler(NodeVisitor):
def compile(self, tree, data_cls_getter=None, **kwargs):
"""Compile a raw AST into a form where conditional expressions
are represented by ConditionalValue objects that can be evaluated
at runtime.
tree - The root node of the wptmanifest AST to compile
data_cls_getter - A function taking two parameters; the previous
output node and the current ast node and returning
the class of the output node to use for the current
ast node
"""
if data_cls_getter is None:
self.data_cls_getter = lambda x, y: ManifestItem
else:
self.data_cls_getter = data_cls_getter
self.tree = tree
self.output_node = self._initial_output_node(tree, **kwargs)
self.visit(tree)
if hasattr(self.output_node, "set_defaults"):
self.output_node.set_defaults()
assert self.output_node is not None
return self.output_node
def compile_condition(self, condition):
"""Compile a ConditionalNode into a ConditionalValue.
condition: A ConditionalNode"""
data_node = DataNode()
key_value_node = KeyValueNode()
key_value_node.append(condition.copy())
data_node.append(key_value_node)
manifest_item = self.compile(data_node)
return manifest_item._data[None][0]
def _initial_output_node(self, node, **kwargs):
return self.data_cls_getter(None, None)(node, **kwargs)
def visit_DataNode(self, node):
if node != self.tree:
output_parent = self.output_node
self.output_node = self.data_cls_getter(self.output_node, node)(node)
else:
output_parent = None
assert self.output_node is not None
for child in node.children:
self.visit(child)
if output_parent is not None:
# Append to the parent *after* processing all the node data
output_parent.append(self.output_node)
self.output_node = self.output_node.parent
assert self.output_node is not None
def visit_KeyValueNode(self, node):
key_values = []
for child in node.children:
condition, value = self.visit(child)
key_values.append(ConditionalValue(child, condition))
self.output_node._add_key_value(node, key_values)
def visit_ListNode(self, node):
return (lambda x:True, [self.visit(child) for child in node.children])
def visit_ValueNode(self, node):
return (lambda x: True, node.data)
def visit_AtomNode(self, node):
    """An atom (e.g. @True/@False/@Reset) matches unconditionally."""
    atom_value = node.data
    return (lambda run_info: True, atom_value)
def visit_ConditionalNode(self, node):
    """Return (compiled condition, compiled value) for one branch."""
    condition_child = node.children[0]
    value_child = node.children[1]
    return self.visit(condition_child), self.visit(value_child)
def visit_StringNode(self, node):
    """Compile a string literal with optional index children into a
    callable that applies each index in turn."""
    index_funcs = [self.visit(child) for child in node.children]

    def lookup(run_info):
        result = node.data
        for index_func in index_funcs:
            result = result[index_func(run_info)]
        return result

    return lookup
def visit_NumberNode(self, node):
    """Compile a numeric literal; a '.' marks a float, otherwise int."""
    caster = float if "." in node.data else int
    return lambda run_info: caster(node.data)
def visit_VariableNode(self, node):
    """Compile a variable reference (with optional index children) into
    a callable that looks the name up in run_info."""
    index_funcs = [self.visit(child) for child in node.children]

    def lookup(run_info):
        result = run_info[node.data]
        for index_func in index_funcs:
            result = result[index_func(run_info)]
        return result

    return lookup
def visit_IndexNode(self, node):
    # An index node wraps exactly one child expression; compiling the
    # index is just compiling that child.
    assert len(node.children) == 1
    return self.visit(node.children[0])
def visit_UnaryExpressionNode(self, node):
    """Compile `<op> <operand>` into a single callable."""
    assert len(node.children) == 2
    op_func = self.visit(node.children[0])
    operand_func = self.visit(node.children[1])

    def evaluate(run_info):
        return op_func(operand_func(run_info))

    return evaluate
def visit_BinaryExpressionNode(self, node):
    """Compile `<lhs> <op> <rhs>` into a single callable."""
    assert len(node.children) == 3
    op_func = self.visit(node.children[0])
    lhs_func = self.visit(node.children[1])
    rhs_func = self.visit(node.children[2])
    assert lhs_func is not None
    assert rhs_func is not None

    def evaluate(run_info):
        return op_func(lhs_func(run_info), rhs_func(run_info))

    return evaluate
def visit_UnaryOperatorNode(self, node):
    """Map a unary operator token to its Python implementation."""
    operators = {"not": operator.not_}
    return operators[node.data]
def visit_BinaryOperatorNode(self, node):
    """Map a binary operator token to its Python implementation."""
    assert isinstance(node.parent, BinaryExpressionNode)
    operators = {
        "and": operator.and_,
        "or": operator.or_,
        "==": operator.eq,
        "!=": operator.ne,
    }
    return operators[node.data]
class ManifestItem(object):
    """Compiled output node corresponding to one DataNode of the AST.

    Each item mirrors its AST ``node``, keeps compiled per-key lists of
    ConditionalValue objects in ``_data``, and forms a tree via
    ``parent``/``children`` that is kept in sync with the AST tree.
    """

    def __init__(self, node=None, **kwargs):
        # node: the wptmanifest AST DataNode backing this item.
        self.node = node
        self.parent = None
        self.children = []
        # key -> list of ConditionalValue, in evaluation order.
        self._data = {}

    def __repr__(self):
        return "<conditional.ManifestItem %s>" % (self.node.data)

    def __str__(self):
        rv = [repr(self)]
        for item in self.children:
            rv.extend(" %s" % line for line in str(item).split("\n"))
        return "\n".join(rv)

    def __contains__(self, key):
        return key in self._data

    def __iter__(self):
        # Pre-order traversal over this item and all its descendants.
        yield self
        for child in self.children:
            for node in child:
                yield node

    @property
    def is_empty(self):
        """True when neither this item nor any descendant holds data."""
        if self._data:
            return False
        return all(child.is_empty for child in self.children)

    @property
    def root(self):
        """The topmost ancestor of this item."""
        node = self
        while node.parent is not None:
            node = node.parent
        return node

    @property
    def name(self):
        return self.node.data

    def has_key(self, key):
        # Keys are looked up on this item and on the root (which holds
        # file-wide defaults); intermediate ancestors are not consulted.
        for node in [self, self.root]:
            if key in node._data:
                return True
        return False

    def get(self, key, run_info=None):
        """Return the first value for *key* whose condition matches
        *run_info*.

        Checks this item first, then the root.  A condition raising
        KeyError (missing run_info entry) is treated as non-matching.
        Raises KeyError when no condition matches.
        """
        if run_info is None:
            run_info = {}
        for node in [self, self.root]:
            if key in node._data:
                for cond_value in node._data[key]:
                    try:
                        matches = cond_value(run_info)
                    except KeyError:
                        matches = False
                    if matches:
                        return cond_value.value
        raise KeyError

    def set(self, key, value, condition=None):
        """Set *value* for *key* under *condition*, updating both the
        backing AST and the compiled-value cache."""
        # First try to update the existing value
        if key in self._data:
            cond_values = self._data[key]
            for cond_value in cond_values:
                if cond_value.condition_node == condition:
                    cond_value.value = value
                    return
            # If there isn't a conditional match reuse the existing KeyValueNode as the
            # parent
            node = None
            for child in self.node.children:
                if child.data == key:
                    node = child
                    break
            assert node is not None
        else:
            node = KeyValueNode(key)
            self.node.append(node)
        if isinstance(value, list):
            value_node = ListNode()
            for item in value:
                value_node.append(ValueNode(text_type(item)))
        else:
            value_node = ValueNode(text_type(value))
        if condition is not None:
            if not isinstance(condition, ConditionalNode):
                conditional_node = ConditionalNode()
                conditional_node.append(condition)
                conditional_node.append(value_node)
            else:
                conditional_node = condition
            node.append(conditional_node)
            cond_value = Compiler().compile_condition(conditional_node)
        else:
            node.append(value_node)
            cond_value = ConditionalValue(value_node, lambda x: True)
        # Update the cache of child values. This is pretty annoying and maybe
        # it should just work directly on the tree
        if key not in self._data:
            self._data[key] = []
        if self._data[key] and self._data[key][-1].condition_node is None:
            # Keep the unconditional default last in evaluation order.
            self._data[key].insert(len(self._data[key]) - 1, cond_value)
        else:
            self._data[key].append(cond_value)

    def clear(self, key):
        """Clear all the expected data for this node"""
        if key in self._data:
            for child in self.node.children:
                if (isinstance(child, KeyValueNode) and
                        child.data == key):
                    child.remove()
                    del self._data[key]
                    break

    def get_conditions(self, property_name):
        # Conditions local to this item only (the root is not consulted).
        if property_name in self._data:
            return self._data[property_name]
        return []

    def _add_key_value(self, node, values):
        """Called during construction to set a key-value node"""
        self._data[node.data] = values

    def append(self, child):
        """Attach *child* below this item, keeping the AST in sync."""
        self.children.append(child)
        child.parent = self
        if child.node.parent != self.node:
            self.node.append(child.node)
        return child

    def remove(self):
        """Detach this item (and its AST node) from its parent."""
        if self.parent:
            self.parent._remove_child(self)

    def _remove_child(self, child):
        # Remove the item and the AST node it wraps.
        self.children.remove(child)
        child.parent = None
        child.node.remove()

    def iterchildren(self, name=None):
        # name=None yields every child; otherwise filter by item name.
        for item in self.children:
            if item.name == name or name is None:
                yield item

    def _flatten(self):
        # Merge local data with root-level defaults; local entries win.
        rv = {}
        for node in [self, self.root]:
            for name, value in iteritems(node._data):
                if name not in rv:
                    rv[name] = value
        return rv

    def iteritems(self):
        for item in iteritems(self._flatten()):
            yield item

    def iterkeys(self):
        for item in iterkeys(self._flatten()):
            yield item

    def iter_properties(self):
        # Local properties only, as (name, conditional values) pairs.
        for item in self._data:
            yield item, self._data[item]

    def remove_value(self, key, value):
        """Remove one ConditionalValue for *key*; drop the key when its
        list becomes empty, then remove the value's AST node."""
        if key not in self._data:
            return
        try:
            self._data[key].remove(value)
        except ValueError:
            return
        if not self._data[key]:
            del self._data[key]
        value.remove()
def compile_ast(ast, data_cls_getter=None, **kwargs):
    """Compile a parsed wptmanifest AST into an output-node tree."""
    compiler = Compiler()
    return compiler.compile(ast, data_cls_getter=data_cls_getter, **kwargs)
def compile(stream, data_cls_getter=None, **kwargs):
    """Parse *stream* and compile the resulting AST in one step."""
    ast = parse(stream)
    return compile_ast(ast, data_cls_getter=data_cls_getter, **kwargs)
|
UK992/servo
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/wptmanifest/backends/conditional.py
|
Python
|
mpl-2.0
| 13,137
|
[
"VisIt"
] |
97408b4678a539fb994cacc896c1d166c6b97371598590a41968bc6c55392b56
|
import hashlib
import json
import os
import shutil
import tempfile
import traceback
import zlib
from typing import Dict, List
from urllib.parse import unquote
from fsbc.paths import Paths
from fsbc.task import TaskFailure, current_task
from fsbc.util import is_sha1
from fscore.resources import Resources
from fsgamesys.amiga.adffile import ADFFile
from fsgamesys.amiga.amiga import Amiga
from fsgamesys.amiga.amigaconstants import AmigaConstants
from fsgamesys.amiga.configwriter import ConfigWriter
from fsgamesys.amiga.fsuae import FSUAE
from fsgamesys.amiga.rommanager import ROMManager
from fsgamesys.amiga.roms import CD32_FMV_ROM, PICASSO_IV_74_ROM
from fsgamesys.amiga.workbenchdata import workbench_disks_with_setpatch_39_6
from fsgamesys.amiga.workbenchextractor import WorkbenchExtractor
# from fsgamesys.amiga.xpkmaster import install_xpkmaster_files
from fsgamesys.archive import Archive
from fsgamesys.download import Downloader
from fsgamesys.drivers.gamedriver import GameDriver
from fsgamesys.FSGSDirectories import FSGSDirectories
from fsgamesys.GameChangeHandler import GameChangeHandler
from fsgamesys.knownfiles import (
ACTION_REPLAY_MK_II_2_14_MOD_ROM,
ACTION_REPLAY_MK_II_2_14_ROM,
ACTION_REPLAY_MK_III_3_17_MOD_ROM,
ACTION_REPLAY_MK_III_3_17_ROM,
)
from fsgamesys.network import is_http_url
from fsgamesys.options.option import Option
from fsgamesys.res import gettext
from fsgamesys.util.gamenameutil import GameNameUtil
# FIXME: Support relative_temp_feature for unpacked archives-as-HD as well
class LaunchHandler(object):
def __init__(self, fsgs, config_name, config, game_paths, temp_dir=""):
    """Prepare a launch handler for one game configuration.

    fsgs: game-system context (also exposed as self.fsgc).
    config_name: display/config name used e.g. for screenshot prefixes.
    config: configuration dict; a copy is taken and sanitized.
    game_paths: helper object providing the per-game state directory.
    temp_dir: optional pre-existing temp directory to reuse.
    """
    self.fsgs = fsgs
    self.fsgc = fsgs
    self.config_name = config_name
    self.config = config.copy()
    # Strip account/device-specific keys so they never leak into the
    # emulator config.  (Fixed: "database_username" was listed twice.)
    for remove_key in [
        "database_username",
        "database_password",
        "database_email",
        "database_auth",
        "device_id",
    ]:
        if remove_key in self.config:
            del self.config[remove_key]
    # make sure FS-UAE does not load other config files (Host.fs-uae)
    self.config["end_config"] = "1"
    self.game_paths = game_paths
    # hd_requirements accepts both "," and ";" as separators.
    self.hd_requirements = set()
    for req in (
        self.config.get("hd_requirements", "").replace(",", ";").split(";")
    ):
        req = req.strip()
        if req:
            self.hd_requirements.add(req)
    self.setpatch_installed = False
    self.temp_dir = temp_dir
    self.change_handler = None
    self.use_relative_paths = (
        self.config.get(Option.RELATIVE_TEMP_FEATURE, "") == "1"
    )
@property
def stop_flag(self):
    # Delegates to the current task so cancellation is observed live
    # at every check site instead of being cached on the handler.
    return current_task.stop_flag
def on_progress(self, progress):
    """Progress hook; intentionally a no-op here."""
    # method can be overridden / replaced in instances
    pass
def on_complete(self):
    """Completion hook; intentionally a no-op here."""
    # method can be overridden / replaced in instances
    pass
def prepare(self):
    """Set up the temp directory, directory configuration and all media
    (floppies, CD-ROMs, change tracking, theme, extra settings) before
    the emulator is started."""
    print("LaunchHandler.prepare")
    if not self.temp_dir:
        self.temp_dir = tempfile.mkdtemp(prefix="fs-uae-")
    print("temp dir", self.temp_dir)
    self.config["floppies_dir"] = self.temp_dir
    print("state dir", self.get_state_dir())
    self.config["state_dir"] = self.get_state_dir()
    self.config["save_states_dir"] = ""
    self.config["floppy_overlays_dir"] = ""
    self.config["flash_memory_dir"] = ""
    # FIXME: Document or change. Tests needs to be able to disable
    # saving changes or otherwise be able to start with a clean slate.
    # FIXME: Change handler is disabled for now
    self.config["cdroms_dir"] = FSGSDirectories.get_cdroms_dir()
    self.config[
        "configurations_dir"
    ] = FSGSDirectories.get_configurations_dir()
    self.config["controllers_dir"] = FSGSDirectories.get_controllers_dir()
    self.config["hard_drives_dir"] = FSGSDirectories.get_hard_drives_dir()
    self.config["kickstarts_dir"] = FSGSDirectories.get_kickstarts_dir()
    self.config["save_states_dir"] = FSGSDirectories.get_save_states_dir()
    self.config["themes_dir"] = FSGSDirectories.get_themes_dir()
    # The stop flag is polled before every step so a cancelled task
    # aborts as early as possible.  (ROM/hard-drive preparation steps
    # are currently disabled, matching the previous behavior.)
    preparation_steps = (
        self.prepare_floppies,
        self.prepare_cdroms,
        self.init_changes,
        self.prepare_theme,
        self.prepare_extra_settings,
    )
    for step in preparation_steps:
        if self.stop_flag:
            return
        step()
def run_sequence(self, start=True, cleanup=True):
    """Run the full launch sequence: prepare, optionally run the
    emulator, write back changes and optionally clean up temp files."""
    print("LaunchHandler.run_sequence")
    self.prepare()
    if not self.stop_flag:
        # too late to stop now...
        if start:
            self.run()
        self.update_changes()
    if cleanup:
        self.cleanup()
    print("calling LaunchHandler.on_complete")
    self.on_complete()
def prepare_roms(self):
    """Resolve every required kickstart/expansion ROM for the chosen
    Amiga model, copy (patching/decrypting as needed) each one into the
    temp dir, and point the config at the staged files.

    Raises TaskFailure when a required ROM cannot be located.
    """
    print("LaunchHandler.prepare_roms")
    current_task.set_progress(gettext("Preparing kickstart ROMs..."))
    amiga_model = self.config.get("amiga_model", "A500")
    model_config = Amiga.get_model_config(amiga_model)
    # List of (config key, candidate ROM names/sha1s) pairs to resolve.
    roms = [("kickstart_file", model_config["kickstarts"])]
    if self.config["kickstart_ext_file"] or model_config["ext_roms"]:
        # not all Amigas have extended ROMs
        roms.append(("kickstart_ext_file", model_config["ext_roms"]))
    if amiga_model.lower() == "cd32/fmv":
        # NOTE(review): key "fvm_rom" looks like a typo for "fmv_rom"
        # (CD32 Full Motion Video) — confirm against the FS-UAE option
        # name before changing.
        roms.append(("fvm_rom", [CD32_FMV_ROM]))
    if self.config["graphics_card"].lower().startswith("picasso-iv"):
        roms.append(("graphics_card_rom", [PICASSO_IV_74_ROM]))
    if self.config["accelerator"].lower() == "cyberstorm-ppc":
        roms.append(("accelerator_rom", ["cyberstormppc.rom"]))
    if self.config["freezer_cartridge"] == "action-replay-2":
        # Ideally, we would want to recognize ROMs based on zeroing the
        # first four bytes, but right now we simply recognize a common
        # additional version. freezer_cartridge_rom isn't a real option,
        # we just want to copy the rom file and let FS-UAE find it
        roms.append(
            (
                "[freezer_cartridge]",
                [
                    ACTION_REPLAY_MK_II_2_14_ROM.sha1,
                    ACTION_REPLAY_MK_II_2_14_MOD_ROM.sha1,
                ],
            )
        )
    elif self.config["freezer_cartridge"] == "action-replay-3":
        roms.append(
            (
                "[freezer_cartridge]",
                [
                    ACTION_REPLAY_MK_III_3_17_ROM.sha1,
                    ACTION_REPLAY_MK_III_3_17_MOD_ROM.sha1,
                ],
            )
        )
    # NOTE(review): never set to True anywhere in this method, so the
    # temp-kickstarts-dir branches below are currently dead code.
    use_temp_kickstarts_dir = False
    for config_key, default_roms in roms:
        print("[ROM]", config_key, default_roms)
        src = self.config[config_key]
        print("[ROM]", src)
        if not src:
            # No explicit ROM configured: try the default candidates.
            for sha1 in default_roms:
                print("[ROM] Trying", sha1)
                if is_sha1(sha1):
                    rom_src = self.fsgs.file.find_by_sha1(sha1)
                    if rom_src:
                        src = rom_src
                        print("[ROM] Found", rom_src)
                        break
                else:
                    # roms_dir = FSGSDirectories.get_kickstarts_dir()
                    # src = os.path.join(roms_dir, sha1)
                    # if os.path.exists(src):
                    # break
                    # loop up file in roms dir instead
                    src = sha1
        elif src == "internal":
            continue
        elif src:
            src = Paths.expand_path(src)
        if not src:
            raise TaskFailure(
                gettext(
                    "Did not find required Kickstart or "
                    "ROM for {}. Wanted one of these files: {}".format(
                        config_key, repr(default_roms)
                    )
                )
            )
        dest = os.path.join(self.temp_dir, os.path.basename(src))

        def lookup_rom_from_src(src):
            # Resolve a ROM reference to a readable path: scheme URLs
            # pass through, then the path itself, then the kickstarts
            # directory is searched.
            parts = src.split(":", 1)
            if len(parts) == 2 and len(parts[0]) > 1:
                # src has a scheme (not a Windows drive letter). Assume
                # we can find this file.
                return src
            archive = Archive(src)
            if archive.exists(src):
                return src
            dirs = [self.fsgs.amiga.get_kickstarts_dir()]
            for dir_ in dirs:
                path = os.path.join(dir_, src)
                print("[ROM] Checking", repr(path))
                archive = Archive(path)
                if archive.exists(path):
                    return path
            return None

        org_src = src
        src = lookup_rom_from_src(src)
        if not src and org_src == "cyberstormppc.rom":
            # Fall back to the Amiga Forever name/location for the
            # CyberStorm PPC ROM.
            src = lookup_rom_from_src(
                "ralphschmidt-cyberstorm-ppc-4471.rom"
            )
            if not src:
                for (
                    dir_
                ) in FSGSDirectories.get_amiga_forever_directories():
                    path = os.path.join(
                        dir_,
                        "Shared",
                        "rom",
                        "ralphschmidt-cyberstorm-ppc-4471.rom",
                    )
                    if os.path.exists(path):
                        src = path
                        print("[ROM] Found", path)
                        break
                    else:
                        print("[ROM] Trying", path)
        stream = None
        # FIXME: prepare_roms should be rewritten, it's kind of crap.
        # Rom patching and decryption should be handled differently. Should
        # use file database filters, and decryption via rom.key should only
        # be supported when using uncompressed files directly on disk.
        if not src or not os.path.exists(src):
            # Not on disk: try the file database as a last resort.
            try:
                stream = self.fsgs.file.open(src)
                if stream is None:
                    raise FileNotFoundError(src)
            except FileNotFoundError:
                raise TaskFailure(
                    gettext(
                        "Cannot find required ROM "
                        "file: {name}".format(name=repr(org_src))
                    )
                )
        with open(dest, "wb") as f:
            if stream:
                # Database stream: read, apply known patches, write.
                print("[ROM] From stream => {}".format(dest))
                rom = {}
                rom["data"] = stream.read()
                rom["sha1"] = hashlib.sha1(rom["data"]).hexdigest()
                ROMManager.patch_rom(rom)
                f.write(rom["data"])
            else:
                # On-disk/archive ROM: decrypt (rom.key) while copying.
                archive = Archive(src)
                ROMManager.decrypt_archive_rom(archive, src, file=f)
        if use_temp_kickstarts_dir:
            self.config[config_key] = os.path.basename(src)
        else:
            self.config[config_key] = dest
    if use_temp_kickstarts_dir:
        self.config["kickstarts_dir"] = self.temp_dir
@staticmethod
def expand_default_path(src, default_dir):
    """Expand *src* against *default_dir*; URLs pass through untouched.

    Returns a (path, archive) tuple; archive is None for URLs.
    """
    if "://" in src:
        return src, None
    expanded = Paths.expand_path(src, default_dir)
    return expanded, Archive(expanded)
def prepare_floppy(self, key):
    """Resolve the floppy image configured at *key* and stage it."""
    configured = self.config.get(key, "").strip()
    if not configured:
        return
    resolved, _archive = self.expand_default_path(
        configured, self.fsgs.amiga.get_floppies_dir()
    )
    base_name = os.path.basename(resolved)
    current_task.set_progress(base_name)
    writable = self.config["writable_floppy_images"] == "1"
    if writable and os.path.isfile(resolved):
        # The config value directly refers to a local file; use the
        # resolved absolute path so writes go to the original image
        # even though floppies_dir has been redirected.
        self.config[key] = resolved
    else:
        staged = os.path.join(self.temp_dir, base_name)
        self.fsgs.file.copy_game_file(resolved, staged)
        self.config[key] = os.path.basename(staged)
def prepare_floppies(self):
    """Stage all configured floppy drives/images and append a writable
    save disk unless save_disk is explicitly disabled."""
    print("LaunchHandler.copy_floppies")
    current_task.set_progress(gettext("Preparing floppy images..."))
    # self.on_progress(gettext("Preparing floppy images..."))
    floppies = []
    for i in range(Amiga.MAX_FLOPPY_DRIVES):
        key = "floppy_drive_{0}".format(i)
        if self.config.get(key, ""):
            floppies.append(self.config[key])
            self.prepare_floppy(key)
    # If no swap-list images are configured, seed the list with the
    # images that are in the drives.
    for i in range(Amiga.MAX_FLOPPY_IMAGES):
        key = "floppy_image_{0}".format(i)
        if self.config.get(key, ""):
            break
    else:
        print("floppy image list is empty")
        for j, floppy in enumerate(floppies):
            self.config["floppy_image_{0}".format(j)] = floppy
    # Stage every swap-list image and track the highest used slot.
    max_image = -1
    for i in range(Amiga.MAX_FLOPPY_IMAGES):
        key = "floppy_image_{0}".format(i)
        self.prepare_floppy(key)
        if self.config.get(key, ""):
            max_image = i
    save_image = max_image + 1
    if self.config.get("save_disk", "") != "0":
        # The blank save disk ships zlib-compressed as a resource; it is
        # named after (a prefix of) its own SHA-1 to avoid collisions.
        s = Resources("fsgamesys").stream("amiga/adf_save_disk.dat")
        data = s.read()
        data = zlib.decompress(data)
        save_disk_sha1 = hashlib.sha1(data).hexdigest()
        # save_disk = os.path.join(self.temp_dir, "Save Disk.adf")
        save_disk = os.path.join(
            self.temp_dir, save_disk_sha1[:8].upper() + ".adf"
        )
        with open(save_disk, "wb") as f:
            f.write(data)
        self.config[f"floppy_image_{save_image}"] = save_disk
        self.config[f"floppy_image_{save_image}_label"] = "Save Disk"
def prepare_cdroms(self):
    """Stage CD-ROM media: resolve game: URIs to local files, write cue
    sheets, and seed the CD-ROM image swap list."""
    print("LaunchHandler.prepare_cdroms")
    # Ensure a drive count is set when media is configured.
    if not self.config.get("cdrom_drive_count", ""):
        if self.config.get("cdrom_drive_0", "") or self.config.get(
            "cdrom_image_0", ""
        ):
            self.config["cdrom_drive_count"] = "1"
    cdrom_drive_0 = self.config.get("cdrom_drive_0", "")
    if cdrom_drive_0.startswith("game:"):
        # game:/<dummy>/<uuid>/<name> — copy every file of the game's
        # file list into the temp dir and write its cue sheets there.
        scheme, dummy, game_uuid, name = cdrom_drive_0.split("/")
        file_list = self.get_file_list_for_game_uuid(game_uuid)
        for file_item in file_list:
            src = self.fsgs.file.find_by_sha1(file_item["sha1"])
            src, archive = self.expand_default_path(
                src, self.fsgs.amiga.get_cdroms_dir()
            )
            dst_name = file_item["name"]
            current_task.set_progress(dst_name)
            dst = os.path.join(self.temp_dir, dst_name)
            self.fsgs.file.copy_game_file(src, dst)
        cue_sheets = self.get_cue_sheets_for_game_uuid(game_uuid)
        for cue_sheet in cue_sheets:
            # FIXME: Try to get this to work with the PyCharm type checker
            # noinspection PyTypeChecker
            with open(
                os.path.join(self.temp_dir, cue_sheet["name"]), "wb"
            ) as f:
                # noinspection PyTypeChecker
                f.write(cue_sheet["data"].encode("UTF-8"))
        # Rebase all configured drive/image paths onto the temp dir.
        for i in range(Amiga.MAX_CDROM_DRIVES):
            key = "cdrom_drive_{0}".format(i)
            value = self.config.get(key, "")
            if value:
                self.config[key] = os.path.join(
                    self.temp_dir, os.path.basename(value)
                )
        for i in range(Amiga.MAX_CDROM_IMAGES):
            key = "cdrom_image_{0}".format(i)
            value = self.config.get(key, "")
            if value:
                self.config[key] = os.path.join(
                    self.temp_dir, os.path.basename(value)
                )
    # If the image swap list is empty, seed it with the drive contents.
    cdroms = []
    for i in range(Amiga.MAX_CDROM_DRIVES):
        key = "cdrom_drive_{0}".format(i)
        if self.config.get(key, ""):
            cdroms.append(self.config[key])
    for i in range(Amiga.MAX_CDROM_IMAGES):
        key = "cdrom_image_{0}".format(i)
        if self.config.get(key, ""):
            break
    else:
        print("CD-ROM image list is empty")
        for j, cdrom in enumerate(cdroms):
            self.config["cdrom_image_{0}".format(j)] = cdrom
def prepare_hard_drives(self):
    """Stage every configured hard drive slot."""
    print("LaunchHandler.prepare_hard_drives")
    current_task.set_progress(gettext("Preparing hard drives..."))
    # self.on_progress(gettext("Preparing hard drives..."))
    for drive_index in range(Amiga.MAX_HARD_DRIVES):
        self.prepare_hard_drive(drive_index)
def prepare_hard_drive(self, index):
    """Resolve and stage the hard drive configured at slot *index*.

    Handles http(s) downloads, game/template pseudo-URIs, archived
    drives, XML-described drives and plain directory paths.
    """
    key = "hard_drive_{}".format(index)
    src = self.config.get(key, "")
    _stem, ext = os.path.splitext(src)
    ext = ext.lower()
    if is_http_url(src):
        # Download into the temp dir first; the downloaded file may
        # still be an archive handled further below.
        name = unquote(src.rsplit("/", 1)[-1])
        self.on_progress(gettext("Downloading {0}...".format(name)))
        dest = os.path.join(self.temp_dir, name)
        Downloader.install_file_from_url(src, dest)
        src = dest
    else:
        # Pseudo-URIs that fully handle the drive themselves; these
        # always produce temp-dir drives, so save states are disabled.
        prefix_handlers = (
            ("hd://game/", self.unpack_game_hard_drive),
            ("file_list:", self.unpack_game_hard_drive),
            ("hd://template/workbench/", self.prepare_workbench_hard_drive),
            ("hd://template/empty/", self.prepare_empty_hard_drive),
        )
        for prefix, handler in prefix_handlers:
            if src.startswith(prefix):
                handler(index, src)
                self.disable_save_states()
                return
    if ext in Archive.extensions:
        print("zipped hard drive", src)
        self.unpack_hard_drive(index, src)
        self.disable_save_states()
    elif src.endswith("HardDrive"):
        print("XML-described hard drive", src)
        self.unpack_hard_drive(index, src)
        self.disable_save_states()
    else:
        src = Paths.expand_path(src)
        self.config[key] = src
def disable_save_states(self):
    """Turn off save states unless explicitly marked safe.

    Save states cannot currently be used with temporarily created
    hard drives, as HD paths are embedded into the save states, and
    restoring the save state causes problems.
    """
    opted_in = self.config.get("unsafe_save_states") == "1"
    if not opted_in:
        self.config["save_states"] = "0"
def prepare_workbench_hard_drive(self, i, src):
    """Populate hard drive slot *i* from a pre-configured minimal
    Workbench directory matching the configured Amiga model.

    Raises Exception when the expected Workbench directory is missing.
    """
    # dir_name = "DH{0}".format(i)
    dir_name = src.rsplit("/", 1)[-1]
    dir_path = os.path.join(self.temp_dir, dir_name)
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    amiga_model = self.config.get("amiga_model", "A500")
    # Pick the newest minimal Workbench compatible with the model.
    if (
        amiga_model.startswith("A1200")
        or amiga_model.startswith("A4000")
        or amiga_model.startswith("A3000")
    ):
        workbench = "Minimal Workbench v3.1"
    elif amiga_model == "A600":
        workbench = "Minimal Workbench v2.05"
    elif amiga_model == "A500+":
        workbench = "Minimal Workbench v2.04"
    else:
        workbench = "Minimal Workbench v1.3"
    print("Try to find pre-configured hard drive", workbench)
    src_dir = os.path.join(
        self.fsgs.amiga.get_hard_drives_dir(), workbench
    )
    if src_dir and os.path.exists(src_dir):
        print("found", src_dir)
        self.copy_folder_tree(src_dir, dir_path)
    else:
        print(" - not found -")
        # Fixed grammar in the error message ("Did not found").
        raise Exception(
            "Did not find pre-configured hard drive " + repr(workbench)
        )
    self.config["hard_drive_{0}".format(i)] = dir_path
def prepare_empty_hard_drive(self, i, src):
    """Create an empty directory-based hard drive for slot *i*; the
    directory name is taken from the last URI path component."""
    target_name = src.rsplit("/", 1)[-1]
    target_path = os.path.join(self.temp_dir, target_name)
    if not os.path.exists(target_path):
        os.makedirs(target_path)
    self.config["hard_drive_{0}".format(i)] = target_path
def get_file_list_for_game_uuid(self, game_uuid):
    """Return the decoded file list for *game_uuid*.

    FIXME: This is an ugly hack, we should already be told what
    database to use; instead the default database is probed first,
    then the CD32 and CDTV databases in turn.
    """
    values = None
    fallback_names = ["CD32", "CDTV"]
    try:
        database = self.fsgs.get_game_database()
        values = database.get_game_values_for_uuid(game_uuid)
    except LookupError:
        for position, database_name in enumerate(fallback_names):
            try:
                database = self.fsgs.game_database(database_name)
                values = database.get_game_values_for_uuid(game_uuid)
                break
            except LookupError:
                # Re-raise only when the last fallback also fails.
                if position == len(fallback_names) - 1:
                    raise
    return json.loads(values["file_list"])
def get_cue_sheets_for_game_uuid(self, game_uuid) -> List[Dict]:
    """Return the decoded cue sheet list for *game_uuid* (may be []).

    FIXME: This is an ugly hack, we should already be told what
    database to use; instead the default database is probed first,
    then the CD32 and CDTV databases in turn.
    """
    values = None
    fallback_names = ["CD32", "CDTV"]
    try:
        database = self.fsgs.get_game_database()
        values = database.get_game_values_for_uuid(game_uuid)
    except LookupError:
        for position, database_name in enumerate(fallback_names):
            try:
                database = self.fsgs.game_database(database_name)
                values = database.get_game_values_for_uuid(game_uuid)
                break
            except LookupError:
                # Re-raise only when the last fallback also fails.
                if position == len(fallback_names) - 1:
                    raise
    raw_cue_sheets = values.get("cue_sheets", "")
    if not raw_cue_sheets:
        return []
    return json.loads(raw_cue_sheets)
def unpack_game_hard_drive(self, drive_index, src):
    """Materialize a game hard drive from a database file list into a
    DH<drive_index> directory in the temp dir, writing FS-UAE .uaem
    metadata files alongside each extracted file."""
    print("unpack_game_hard_drive", drive_index, src)
    if src.startswith("file_list:"):
        # file_list:/<dummy>/<drive> — file list comes from the config.
        _scheme, dummy, drive = src.split("/")
        file_list = json.loads(self.fsgs.config.get("file_list"))
    else:
        # hd://game/<uuid>/<drive> — file list from the game database.
        _scheme, dummy, dummy, game_uuid, drive = src.split("/")
        file_list = self.get_file_list_for_game_uuid(game_uuid)
    drive_prefix = drive + "/"
    dir_name = "DH{0}".format(drive_index)
    dir_path = os.path.join(self.temp_dir, dir_name)
    for file_entry in file_list:
        if self.stop_flag:
            return
        name = file_entry["name"]
        # Only entries belonging to the requested drive are extracted.
        if not name.startswith(drive_prefix):
            continue
        # extract Amiga relative path and convert each path component
        # to host file name (where needed).
        # amiga_rel_path = name[len(drive_prefix) :]
        # print("amiga_rel_path", amiga_rel_path)
        # amiga_rel_parts = amiga_rel_path.split("/")
        # for i, part in enumerate(amiga_rel_parts):
        # # part can be blank if amiga_rel_parts is a directory
        # # (ending with /)
        # if part:
        # amiga_rel_parts[i] = amiga_filename_to_host_filename(part)
        # amiga_rel_path = "/".join(amiga_rel_parts)
        amiga_rel_path = amiga_path_to_host_path(name[len(drive_prefix) :])
        dst_file = os.path.join(dir_path, amiga_rel_path)
        print(repr(dst_file))
        if name.endswith("/"):
            # Trailing slash marks a directory entry.
            os.makedirs(dst_file)
            continue
        if not os.path.exists(os.path.dirname(dst_file)):
            os.makedirs(os.path.dirname(dst_file))
        sha1 = file_entry["sha1"]
        # current_task.set_progress(os.path.basename(dst_file))
        current_task.set_progress(amiga_rel_path)
        self.fsgs.file.copy_game_file("sha1://{0}".format(sha1), dst_file)
        # src_file = self.fsgs.file.find_by_sha1(sha1)
        # if not os.path.exists(os.path.dirname(dst_file)):
        # os.makedirs(os.path.dirname(dst_file))
        # stream = self.fsgs.file.open(src_file)
        # # archive = Archive(src_file)
        # # f = archive.open(src_file)
        # data = stream.read()
        # assert hashlib.sha1(data).hexdigest() == sha1
        # with open(dst_file, "wb") as out_file:
        # out_file.write(data)
        # .uaem sidecar: protection bits, date, optional file comment.
        # noinspection SpellCheckingInspection
        metadata = [
            "----rwed",
            " ",
            "2000-01-01 00:00:00.00",
            " ",
            "",
            "\n",
        ]
        if "comment" in file_entry:
            metadata[4] = encode_file_comment(file_entry["comment"])
        with open(dst_file + ".uaem", "wb") as out_file:
            out_file.write("".join(metadata).encode("UTF-8"))
    # With relative_temp_feature enabled, store the drive relative to
    # the temp dir instead of as an absolute path.
    if self.use_relative_paths:
        self.config["hard_drive_{0}".format(drive_index)] = dir_name
    else:
        self.config["hard_drive_{0}".format(drive_index)] = dir_path
def unpack_hard_drive(self, i, src):
    """Extract an archived hard drive into a DH<i> temp directory."""
    resolved, _archive = self.expand_default_path(
        src, self.fsgs.amiga.get_hard_drives_dir()
    )
    target_dir = os.path.join(self.temp_dir, "DH{0}".format(i))
    self.unpack_archive(resolved, target_dir)
    self.config["hard_drive_{0}".format(i)] = target_dir
# def create_devs_dir(self):
# devs_dir = os.path.join(dest_dir, "Devs")
# if not os.path.exists(devs_dir):
# os.makedirs(devs_dir)
#
# def create_fonts_dir(self):
# fonts_dir = os.path.join(dest_dir, "Fonts")
# if not os.path.exists(fonts_dir):
# os.makedirs(fonts_dir)
def copy_hd_files(self):
    """Build the DH0 system volume for WHDLoad/hdinst/custom startup
    configurations: directory skeleton, SetPatch, Workbench files and a
    Startup-Sequence."""
    whdload_args = self.config.get("x_whdload_args", "").strip()
    hdinst_args = self.config.get("x_hdinst_args", "").strip()
    hd_startup = self.config.get("hd_startup", "").strip()
    if not whdload_args:
        # The WHDLoad override setting and config key does not quite
        # follow the usual semantics of configs/settings unfortunately, so
        # we really want whdload_quit_key to be cleared when not using
        # WHDLoad. Otherwise the emulator will try to quit everything with
        # the WHDLoad quit key (when overriden).
        self.config["whdload_quit_key"] = ""
    if not whdload_args and not hdinst_args and not hd_startup:
        return
    dest_dir = os.path.join(self.temp_dir, "DH0")
    if not self.config.get("hard_drive_0", ""):
        self.config["hard_drive_0"] = dest_dir
        self.config["hard_drive_0_label"] = "Workbench"
    print("copy_hd_files, dest_dir = ", dest_dir)
    # Standard system volume skeleton: S, Libs, Devs, Fonts.
    s_dir = os.path.join(dest_dir, "S")
    if not os.path.exists(s_dir):
        os.makedirs(s_dir)
    libs_dir = os.path.join(dest_dir, "Libs")
    if not os.path.exists(libs_dir):
        os.makedirs(libs_dir)
    devs_dir = os.path.join(dest_dir, "Devs")
    if not os.path.exists(devs_dir):
        os.makedirs(devs_dir)
    fonts_dir = os.path.join(dest_dir, "Fonts")
    if not os.path.exists(fonts_dir):
        os.makedirs(fonts_dir)
    if hd_startup:
        self.config["hard_drive_0_priority"] = "6"
        # don't copy setpatch by default, at least not yet
        pass
    else:
        self.hd_requirements.add("setpatch")
        self.copy_setpatch(dest_dir)
    # Map model to the Workbench version used for requirement installs.
    amiga_model = self.config.get("amiga_model", "A500").upper()
    if amiga_model in ["A500+", "A600"]:
        workbench_version = "2.04"
    elif amiga_model.startswith("A1200"):
        workbench_version = "3.0"
    elif amiga_model.startswith("A4000"):
        workbench_version = "3.0"
    else:
        workbench_version = None
    if "workbench" in self.hd_requirements:
        if not workbench_version:
            raise Exception(
                "Unsupported workbench version for hd_requirements"
            )
        extractor = WorkbenchExtractor(self.fsgs)
        extractor.install_version(workbench_version, dest_dir)
        # install_workbench_files(self.fsgs, dest_dir, workbench_version)
    for req in self.hd_requirements:
        if "/" in req:
            # assume a specific workbench file
            extractor = WorkbenchExtractor(self.fsgs)
            extractor.install_version(
                workbench_version,
                dest_dir,
                [req],
                install_startup_sequence=False,
            )
    if whdload_args:
        self.copy_whdload_files(dest_dir, s_dir)
    elif hdinst_args:
        self.write_startup_sequence(s_dir, hdinst_args)
    elif hd_startup:
        self.write_startup_sequence(s_dir, hd_startup)
    if "xpkmaster.library" in self.hd_requirements:
        # NOTE(review): the import of install_xpkmaster_files is
        # commented out at the top of this file, so reaching this branch
        # would raise NameError — confirm whether the requirement is
        # still supported.
        install_xpkmaster_files(dest_dir)
    # system_configuration is a module-level constant defined elsewhere
    # in this file.
    system_configuration_file = os.path.join(
        devs_dir, "system-configuration"
    )
    if not os.path.exists(system_configuration_file):
        with open(system_configuration_file, "wb") as f:
            f.write(system_configuration)
def copy_whdload_files(self, dest_dir, s_dir):
    """WHDLoad system-volume population; currently disabled (no-op)."""
    # from fsgamesys.amiga.whdload import populate_whdload_system_volume
    # populate_whdload_system_volume(dest_dir, s_dir, config=self.config)
    pass
def get_whdload_dir(self):
    # Returns the user-configured WHDLoad boot directory option value
    # (may be None when not configured).
    path = self.config.get(Option.WHDLOAD_BOOT_DIR)
    return path
def write_startup_sequence(self, s_dir, command):
    """Write a Startup-Sequence executing *command* into *s_dir*."""
    from fsgamesys.amiga.startupsequence import write_startup_sequence

    # Only pass a SetPatch flag when SetPatch is actually required; the
    # flag then reflects whether SetPatch was found and installed.
    if "setpatch" in self.hd_requirements:
        setpatch = True if self.setpatch_installed else False
    else:
        setpatch = None
    write_startup_sequence(s_dir, command, setpatch=setpatch)
# # FIXME: semi-colon is used in WHDLoad CONFIG options...
# command = "\n".join([x.strip() for x in command.split(";")])
# startup_sequence = os.path.join(s_dir, "Startup-Sequence")
# # if True:
# if not os.path.exists(startup_sequence):
# with open(startup_sequence, "wb") as f:
# if "setpatch" in self.hd_requirements:
# if self.setpatch_installed:
# f.write(
# setpatch_found_sequence.replace(
# "\r\n", "\n"
# ).encode("ISO-8859-1")
# )
# else:
# f.write(
# setpatch_not_found_sequence.replace(
# "\r\n", "\n"
# ).encode("ISO-8859-1")
# )
# f.write(command.replace("\r\n", "\n").encode("ISO-8859-1"))
# # The User-Startup file is useful if the user has provided a
# # base WHDLoad directory with an existing startup-sequence
# user_startup = os.path.join(s_dir, "User-Startup")
# with open(user_startup, "ab") as f:
# f.write(command.replace("\r\n", "\n").encode("ISO-8859-1"))
# def install_whdload_file(self, sha1, dest_dir, rel_path):
# abs_path = os.path.join(dest_dir, rel_path)
# name = os.path.basename(rel_path)
# self.on_progress(gettext("Downloading {0}...".format(name)))
# Downloader.install_file_by_sha1(sha1, name, abs_path)
# COPIED TO installfiles
def copy_setpatch(self, base_dir):
    """Extract SetPatch 39.6 from a known Workbench disk into C/ of
    *base_dir*, setting self.setpatch_installed on success."""
    dest = os.path.join(base_dir, "C")
    if not os.path.exists(dest):
        os.makedirs(dest)
    dest = os.path.join(dest, "SetPatch")
    # Try every Workbench disk known to contain SetPatch 39.6 until
    # extraction succeeds (for/else: else runs when none succeeded).
    for checksum in workbench_disks_with_setpatch_39_6:
        path = self.fsgs.file.find_by_sha1(checksum)
        if path:
            print("found WB DISK with SetPatch 39.6 at", path)
            try:
                input_stream = self.fsgs.file.open(path)
            except Exception:
                # Best-effort: log and try the next candidate disk.
                traceback.print_exc()
            else:
                wb_data = input_stream.read()
                # archive = Archive(path)
                # if archive.exists(path):
                # f = archive.open(path)
                # wb_data = f.read()
                # f.close()
                if self.extract_setpatch_39_6(wb_data, dest):
                    print("SetPatch installed")
                    self.setpatch_installed = True
                    break
                else:
                    print("WARNING: extract_setpatch_39_6 returned False")
                # else:
                # print("oops, path does not exist")
    else:
        print("WARNING: did not find SetPatch 39.6")
# COPIED TO installfiles
@staticmethod
def extract_setpatch_39_6(wb_data: bytes, dest):
    """Extract C/SetPatch from ADF image data *wb_data* to *dest*.

    Returns True only when the file exists in the image and its SHA-1
    matches the known SetPatch 39.6 checksum.
    """
    try:
        payload = ADFFile(wb_data).open("C/SetPatch").read()
    except KeyError:
        return False
    digest = hashlib.sha1(payload).hexdigest()
    print(digest)
    # noinspection SpellCheckingInspection
    if digest != AmigaConstants.SETPATCH_39_6_SHA1:
        return False
    with open(dest, "wb") as f:
        f.write(payload)
    return True
# def copy_whdload_kickstart(self, base_dir, name, checksums):
# dest = os.path.join(base_dir, "Devs", "Kickstarts")
# if not os.path.exists(dest):
# os.makedirs(dest)
# dest = os.path.join(dest, name)
# for checksum in checksums:
# # print("find kickstart with sha1", checksum)
# path = self.fsgs.file.find_by_sha1(checksum)
# if path: # and os.path.exists(path):
# print("found kickstart for", name, "at", path)
# archive = Archive(path)
# if archive.exists(path):
# with open(dest, "wb") as f:
# ROMManager.decrypt_archive_rom(archive, path, file=f)
# print(repr(dest))
# break
# else:
# stream = self.fsgs.file.open(path)
# if stream is None:
# raise Exception("Cannot find kickstart " + repr(path))
# with open(dest, "wb") as f:
# f.write(stream.read())
# else:
# print("did not find kickstart for", name)
def get_state_dir(self):
    # Thin delegation to the game-paths helper; kept as a method so the
    # state directory lookup has a single override point.
    return self.game_paths.get_state_dir()
def init_changes(self):
    """Restore previously saved file system changes into the state dir."""
    handler = self.change_handler
    if handler is None:
        # Change tracking is disabled for this launch.
        return
    print("LaunchHandler.init_changes")
    self.on_progress(gettext("Restoring changes..."))
    # Emulator save states (*.uss) and *.sdf files are never tracked.
    handler.init(self.get_state_dir(), ignore=["*.uss", "*.sdf"])
def update_changes(self):
    """Persist file system changes made during this session."""
    handler = self.change_handler
    if handler is None:
        # Nothing to save when change tracking is disabled.
        return
    print("LaunchHandler.update_changes")
    self.on_progress(gettext("Saving changes..."))
    handler.update(self.get_state_dir())
def cleanup(self):
    """Remove temporary files and, when empty, the state directories."""
    print("LaunchHandler.cleanup")
    if os.environ.get("FSGS_CLEANUP", "") == "0":
        # Debug escape hatch: keep everything for post-mortem inspection.
        print("[DRIVER] NOTICE: keeping temp files around...")
        return
    self.on_progress(gettext("Cleaning up..."))
    # self.delete_tree(self.temp_dir)
    shutil.rmtree(self.temp_dir, ignore_errors=True)
    state_dir = self.get_state_dir()
    try:
        # this will only succeed if the directory is empty - we don't
        # want to leave unnecessary empty state directories
        os.rmdir(state_dir)
        print("removed", repr(state_dir))
        # also try to remove the parent (letter dir)
        os.rmdir(os.path.dirname(state_dir))
        print("removed", repr(os.path.dirname(state_dir)))
    except OSError:
        # could not delete directories - ok - probably has content
        pass
def prepare_theme(self):
    # Placeholder: theme selection is currently disabled.
    # path = self.game_paths.get_theme_path()
    # if path:
    #     self.config["theme"] = path
    pass
def prepare_extra_settings(self):
    """Derive a screenshot filename prefix when none was configured."""
    prefix = self.config.get("screenshots_output_prefix", "")
    if prefix:
        # Explicitly configured; nothing to do.
        return
    # name = self.config.get("floppy_drive_0", "")
    # if not name:
    #     name = self.config.get("hard_drive_0", "")
    # if not name:
    #     name = self.config.get("cdrom_drive_0", "")
    # if not name:
    #     name = self.config.get("floppy_image_0", "")
    name = self.config_name
    if not name:
        name = "fs-uae"
    # Strip the variant part and normalize to a comparable base name.
    name, variant = GameNameUtil.extract_names(name)
    name = GameNameUtil.create_cmpname(name)
    self.config["screenshots_output_prefix"] = name
# def create_config(self):
# config = ConfigWriter(self.config).create_fsuae_config()
# return config
def write_config(self, f):
    """Write the generated FS-UAE configuration, one line per option,
    to the text stream *f*."""
    for config_line in self.create_config():
        f.write(config_line + "\n")
def write_config_to_file(self, path):
    """Write the FS-UAE configuration to *path* as UTF-8 text.

    Bug fix: the file was previously opened in binary mode ("wb"),
    but write_config writes str lines, which raises TypeError on
    Python 3.  Open in text mode with an explicit encoding instead.
    """
    with open(path, "w", encoding="UTF-8") as f:
        self.write_config(f)
def run(self):
    """Create the FS-UAE config, launch the emulator and wait for exit."""
    print("LaunchHandler.run")
    # self.on_progress(gettext("Starting FS-UAE..."))
    # current_task.set_progress(gettext("Starting FS-UAE..."))
    current_task.set_progress("__run__")
    config = self.create_config()
    if self.use_relative_paths:
        # Run from the temp dir so relative paths in the config resolve.
        process, config_file = FSUAE.start_with_config(
            config, cwd=self.temp_dir
        )
    else:
        process, config_file = FSUAE.start_with_config(config)
    pid_file = self.fsgc.settings[Option.EMULATOR_PID_FILE]
    GameDriver.write_emulator_pid_file(pid_file, process)
    # Block until the emulator exits.
    process.wait()
    GameDriver.remove_emulator_pid_file(pid_file)
    print("LaunchHandler.start is done")
    # FSGS_CLEANUP=0 keeps the temporary config file for debugging.
    if os.environ.get("FSGS_CLEANUP", "") == "0":
        print("Not removing", config_file)
    else:
        print("removing", config_file)
        try:
            os.remove(config_file)
        except Exception:
            # Best effort only; a leftover temp config file is harmless.
            print("could not remove config file", config_file)
def unpack_archive(self, path, destination):
    """Extract every entry of the archive at *path* into *destination*.

    Amiga path components are converted to host-safe names, and a
    ``.uaem`` metadata file is written next to each extracted file.
    Extraction stops early when self.stop_flag is set.
    """
    print("unpack", path, "to", destination)
    archive = Archive(path)
    print(archive)
    print(archive.get_handler())
    for entry in archive.list_files():
        if self.stop_flag:
            return
        print(entry)
        # Entry names start with the archive path plus a separator;
        # strip that prefix to get the archive-relative name.
        # NOTE(review): the +2 assumes a two-character separator after
        # *path* in every entry name -- confirm against Archive.list_files.
        n = entry[len(path) + 2 :]
        amiga_rel_path = amiga_path_to_host_path(n)
        out_path = os.path.join(destination, amiga_rel_path)
        print("out path", out_path)
        if entry.endswith("/"):
            os.makedirs(out_path)
        else:
            if not os.path.exists(os.path.dirname(out_path)):
                os.makedirs(os.path.dirname(out_path))
            f = archive.open(entry)
            with open(out_path, "wb") as out_f:
                # Copy in 64 KiB chunks to bound memory use.
                while True:
                    data = f.read(65536)
                    if not data:
                        break
                    out_f.write(data)
            # FIXME: Extract real timestamps from archive
            # FIXME: Real metadata from archive
            # Fields: protection bits, date, comment (index 4), newline.
            # noinspection SpellCheckingInspection
            metadata = [
                "----rwed",
                " ",
                "2000-01-01 00:00:00.00",
                " ",
                "",
                "\n",
            ]
            info = archive.getinfo(entry)
            if info.comment:
                # print(info.comment)
                # raise Exception("gnit")
                metadata[4] = encode_file_comment(info.comment)
            with open(out_path + ".uaem", "wb") as out_file:
                out_file.write("".join(metadata).encode("UTF-8"))
def copy_folder_tree(self, source_path, dest_path, overwrite=False):
    """Recursively copy *source_path* into *dest_path*.

    Hidden entries (names starting with ".") are skipped.  Existing
    files are only replaced when *overwrite* is true.  The copy stops
    early when self.stop_flag is set.
    """
    for item in os.listdir(source_path):
        if self.stop_flag:
            return
        if item.startswith("."):
            continue
        item_path = os.path.join(source_path, item)
        dest_item_path = os.path.join(dest_path, item)
        if os.path.isdir(item_path):
            if not os.path.exists(dest_item_path):
                os.makedirs(dest_item_path)
            # Bug fix: propagate *overwrite* into the recursive call;
            # previously subdirectories always behaved as overwrite=False.
            self.copy_folder_tree(item_path, dest_item_path, overwrite=overwrite)
            if self.stop_flag:
                return
        else:
            if overwrite or not os.path.exists(dest_item_path):
                print("copy", repr(item_path), "to", repr(dest_item_path))
                shutil.copy(item_path, dest_item_path)
def encode_file_comment(comment):
    """Encode an Amiga file comment as %xx hex escapes.

    A str comment is first encoded as ISO-8859-1; every byte then
    becomes "%" followed by its (unpadded) lowercase hex value.
    """
    if isinstance(comment, str):
        comment = comment.encode("ISO-8859-1")
    return "".join("%{0:x}".format(byte) for byte in comment)
def amiga_path_to_host_path(amiga_rel_path):
    """Convert each component of an Amiga-relative path for the host FS."""
    # amiga_rel_path = amiga_path
    print("amiga_rel_path", amiga_rel_path)
    # Empty components (e.g. from a trailing "/") are preserved as-is.
    converted = [
        amiga_filename_to_host_filename(part) if part else part
        for part in amiga_rel_path.split("/")
    ]
    return "/".join(converted)
def amiga_filename_to_host_filename(amiga_name, ascii_only=False):
    """
    Converted from FS-UAE C code (src/od-fs/fsdb-host.py)
    @author: TheCyberDruid

    Escapes characters that are unsafe on the host file system as
    "%xx" hex sequences: reserved DOS device names (AUX, CON, PRN,
    NUL, LPTn, COMn), a trailing "." or " ", control characters,
    characters in EVIL_CHARS, optionally non-ASCII characters, and
    names ending in ".uaem" (reserved for metadata files).
    """
    length = len(amiga_name)
    replace_1 = -1  # index of the char to escape for device names
    replace_2 = -1  # index of a trailing "." or " " to escape
    check = amiga_name[:3].upper()
    dot_pos = -1
    if check in ["AUX", "CON", "PRN", "NUL"]:
        dot_pos = 4
    elif check in ["LPT", "COM"] and length >= 4 and amiga_name[3].isdigit():
        dot_pos = 5
    # The name is reserved when it is exactly the device name, or the
    # device name immediately followed by a "." extension.
    if dot_pos > -1 and (
        length == (dot_pos - 1)
        or (length > dot_pos and amiga_name[dot_pos] == ".")
    ):
        replace_1 = 2
    if amiga_name[-1] == "." or amiga_name[-1] == " ":
        replace_2 = length - 1
    i = 0
    filename = ""
    for c in amiga_name:
        x = ord(c)
        replace = False
        if i == replace_1:
            replace = True
        elif i == replace_2:
            replace = True
        elif x < 32:
            # Control characters are never allowed.
            replace = True
        elif ascii_only and x > 127:
            replace = True
        if not replace:
            for evil in EVIL_CHARS:
                if evil == c:
                    replace = True
                    break
        # Escape the final character of names ending in ".uaem" so real
        # files can never collide with generated metadata files.
        if (i == length - 1) and amiga_name[-5:] == ".uaem":
            replace = True
        if replace:
            filename += "%" + "%02x" % ord(c)
        else:
            filename += c
        i += 1
    return filename
EVIL_CHARS = '%\\*?"/|<>'
system_configuration = (
b"\x08\x00\x00\x05\x00\x00\x00\x00\x00\x00\xc3"
b"P\x00\x00\x00\x00\x00\t'\xc0\x00\x00\x00\x01\x00\x00N \x00\x00\x00\x00"
b"\xc0\x00@\x00p\x00\xb0\x00<\x00L\x00?\x00C\x00\x1f\xc0 \xc0\x1f\xc0 \x00"
b"\x0f\x00\x11\x00\r\x80\x12\x80\x04\xc0\t@\x04`\x08\xa0\x00 \x00@\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\xff\x00\x0eD\x00\x00\x0e\xec\x00\x01\n\xaa\x00\x00\x0f"
b"\xff\x06\x8b\x00\x00\x00\x81\x00,\x00\x00\x00\x00generic\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00K\x00\x00\x00\x00\x00\x00\x00\x07"
b"\x00 \x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00"
b"\x00"
)
|
FrodeSolheim/fs-uae-launcher
|
fsgamesys/amiga/launchhandler.py
|
Python
|
gpl-2.0
| 46,150
|
[
"ADF"
] |
355ad9fe5d9352ceba4ea0d11d18ff52c61274c3395fd09642acf5b41600bcb9
|
#!/usr/bin/env python
__author__ = 'Mike McCann,Duane Edgington,Reiko Michisaki'
__copyright__ = '2013'
__license__ = 'GPL v3'
__contact__ = 'duane at mbari.org'
__doc__ = '''
cron loader for CANON wave gliders slocum, OA and TEX in September 2013
Mike McCann; Modified by Duane Edgington and Reiko Michisaki
MBARI 02 September 2013
@var __date__: Date of last svn commit
@undocumented: __doc__ parser
@status: production
@license: GPL
'''
import os
import sys
import datetime  # needed for glider data
import time  # for startdate, enddate args

project_dir = os.path.dirname(__file__)

# the next line makes it possible to find CANON
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))  # this makes it possible to find CANON, one directory up

from CANON import CANONLoader

# building input data sources object
from socket import gethostname
hostname = gethostname()
print(hostname)
# The test server loads into the September 2011 campaign database.
if hostname == 'odss-test.shore.mbari.org':
    cl = CANONLoader('stoqs_september2011', 'CANON - September 2011')
else:
    cl = CANONLoader('stoqs_september2013', 'CANON - September 2013')

# default location of thredds and dods data:
cl.tdsBase = 'http://odss.mbari.org/thredds/'
cl.dodsBase = cl.tdsBase + 'dodsC/'

######################################################################
# GLIDERS
######################################################################
# Set start and end dates for all glider loads
# startdate is 13.2 hours before the current time, truncated to whole
# minutes (NOTE(review): an earlier comment said "24hours from now",
# but the code subtracts 13.2*60*60 seconds -- confirm intent)
ts = time.time() - (13.2 * 60 * 60)
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M')
t = time.strptime(st, "%Y-%m-%d %H:%M")
#t =time.strptime("2013-09-01 0:01", "%Y-%m-%d %H:%M")
startdate = t[:6]
t = time.strptime("2013-10-31 0:01", "%Y-%m-%d %H:%M")
enddate = t[:6]

# Glider ctd
cl.glider_ctd_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/Slocum_Teledyne/'
cl.glider_ctd_files = ['nemesis_ctd.nc',
                       # 'ucsc260_ctd.nc',
                       'ucsc294_ctd.nc']
cl.glider_ctd_parms = ['TEMP', 'PSAL']
cl.glider_ctd_startDatetime = datetime.datetime(*startdate[:])
cl.glider_ctd_endDatetime = datetime.datetime(*enddate[:])

# Glider met
cl.glider_met_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/Slocum_Teledyne/'
cl.glider_met_files = ['nemesis_met.nc',
                       # 'ucsc260_met.nc',
                       'ucsc294_met.nc']
cl.glider_met_parms = ['meanu', 'meanv']
cl.glider_met_startDatetime = datetime.datetime(*startdate[:])
cl.glider_met_endDatetime = datetime.datetime(*enddate[:])

# WG OA ctd
cl.wg_oa_ctd_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/WG_OA/NetCDF/'
cl.wg_oa_ctd_files = ['WG_OA_ctd.nc']
cl.wg_oa_ctd_parms = ['TEMP', 'PSAL', 'DENSITY', 'OXYGEN']
cl.wg_oa_ctd_startDatetime = datetime.datetime(*startdate[:])
cl.wg_oa_ctd_endDatetime = datetime.datetime(*enddate[:])

# WG Tex ctd
cl.wg_tex_ctd_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/WG_Tex/NetCDF/'
cl.wg_tex_ctd_files = ['WG_Tex_ctd.nc']
cl.wg_tex_ctd_parms = ['TEMP', 'PSAL', 'DENSITY']
cl.wg_tex_ctd_startDatetime = datetime.datetime(*startdate[:])
cl.wg_tex_ctd_endDatetime = datetime.datetime(*enddate[:])

# WG OA met
cl.wg_oa_met_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/WG_OA/NetCDF/'
cl.wg_oa_met_files = ['WG_OA_met.nc']
cl.wg_oa_met_parms = ['WINDSPEED', 'WINDDIRECTION', 'AIRTEMPERATURE', 'AIRPRESSURE']
cl.wg_oa_met_startDatetime = datetime.datetime(*startdate[:])
cl.wg_oa_met_endDatetime = datetime.datetime(*enddate[:])

# WG Tex met
cl.wg_tex_met_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/WG_Tex/NetCDF/'
cl.wg_tex_met_parms = ['WINDSPEED', 'WINDDIRECTION', 'AIRTEMPERATURE', 'AIRPRESSURE']
cl.wg_tex_met_files = ['WG_Tex_met.nc']
cl.wg_tex_met_startDatetime = datetime.datetime(*startdate[:])
cl.wg_tex_met_endDatetime = datetime.datetime(*enddate[:])

# WG OA pco2
cl.wg_oa_pco2_base = cl.dodsBase + 'CANON_september2013/Platforms/Gliders/WG_OA/NetCDF/'
cl.wg_oa_pco2_files = ['WG_OA_pco2.nc']
cl.wg_oa_pco2_parms = ['pH', 'eqpco2', 'airco2', 'airtemp']
cl.wg_oa_pco2_startDatetime = datetime.datetime(*startdate[:])
cl.wg_oa_pco2_endDatetime = datetime.datetime(*enddate[:])

###################################################################################################################
# Execute the load
cl.process_command_line()

# NOTE(review): the WG_OA loads are commented out in every branch, so
# only WG_Tex and the Slocum gliders are currently loaded.
if cl.args.test:
    # cl.load_wg_oa_pco2(stride=1)
    # cl.load_wg_oa_ctd(stride=1)
    # cl.load_wg_oa_met(stride=1)
    cl.load_wg_tex_ctd(stride=1)
    cl.load_wg_tex_met(stride=1)
    cl.load_glider_ctd(stride=1)
    cl.load_glider_met(stride=1)
elif cl.args.optimal_stride:
    # cl.load_wg_oa_pco2(stride=2)
    # cl.load_wg_oa_ctd(stride=2)
    # cl.load_wg_oa_met(stride=2)
    cl.load_wg_tex_ctd(stride=2)
    cl.load_wg_tex_met(stride=2)
    cl.load_glider_ctd(stride=2)
    cl.load_glider_met(stride=2)
else:
    # cl.load_wg_oa_pco2(stride=1)
    # cl.load_wg_oa_ctd(stride=1)
    # cl.load_wg_oa_met(stride=1)
    cl.load_wg_tex_ctd(stride=1)
    cl.load_wg_tex_met(stride=1)
    cl.load_glider_ctd(stride=1)
    cl.load_glider_met(stride=1)
|
stoqs/stoqs
|
stoqs/loaders/CANON/wg_loadsep2013.py
|
Python
|
gpl-3.0
| 5,101
|
[
"NetCDF"
] |
a88e669d30fcbc52aa3bbc9cd831f28506fb8fae4d292031ad20cf2f36b6d133
|
#!/usr/bin/env python
# Copyright 2020 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This application verifies HSM attestations using certificate bundles
obtained from Cloud HSM.
For more information, visit https://cloud.google.com/kms/docs/attest-key.
"""
# [START verify_attestations]
import argparse
import gzip
from cryptography import exceptions
from cryptography import x509
from cryptography.hazmat import backends
from cryptography.hazmat.primitives.asymmetric import padding
import pem
def verify(attestation_file, bundle_file):
    """Verifies an attestation using a bundle of certificates.

    Args:
        attestation_file: The name of the attestation file.
        bundle_file: The name of the bundle file containing the certificates
            used to verify the attestation.

    Returns:
        True if at least one of the certificates in bundle_file can verify the
        attestation data and its signature.
    """
    with gzip.open(attestation_file, 'rb') as f:
        attestation = f.read()

    # An attestation file is the signed data followed by a 256 byte
    # signature, concatenated.  Split the two parts.
    data = attestation[:-256]
    signature = attestation[-256:]

    # Try each certificate in the bundle until one verifies.
    for cert in pem.parse_file(bundle_file):
        cert_obj = x509.load_pem_x509_certificate(
            str(cert).encode('utf-8'), backends.default_backend())
        try:
            # The data should have been signed (PKCS1v15 padding) by the
            # private key matching this certificate's public key.
            cert_obj.public_key().verify(
                signature, data, padding.PKCS1v15(),
                cert_obj.signature_hash_algorithm)
            return True
        except exceptions.InvalidSignature:
            # Most certificates in a bundle will not match the signing
            # key, so invalid signatures are expected -- try the next one.
            continue
    return False
# [END verify_attestations]
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__)
    parser.add_argument('attestation_file', help="Name of attestation file.")
    parser.add_argument('bundle_file', help="Name of certificate bundle file.")
    args = parser.parse_args()

    # Print a human-readable result; verify() returns a bool.
    if verify(args.attestation_file, args.bundle_file):
        print('Signature verified.')
    else:
        print('Signature verification failed.')
|
googleapis/python-kms
|
samples/attestations/verify_attestation.py
|
Python
|
apache-2.0
| 3,178
|
[
"VisIt"
] |
d04c745af26ac46b2ce9dd406afb43139f563586181646f06b6e709901d9a2be
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
This module contains the classes for configuration of the chemenv package.
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__credits__ = "Geoffroy Hautier"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "david.waroquiers@gmail.com"
__date__ = "Feb 20, 2016"
from pymatgen.analysis.chemenv.utils.chemenv_errors import ChemenvError
from pymatgen.analysis.chemenv.utils.scripts_utils import strategies_class_lookup
from os.path import expanduser, exists
from os import makedirs
import json
class ChemEnvConfig():
    """
    Class used to store the configuration of the chemenv package :
     - Materials project access
     - ICSD database access
     - Default options (strategies, ...)

    NOTE: the interactive methods use raw_input, i.e. this code targets
    Python 2.
    """
    # Default strategy and cutoffs used when no user configuration exists.
    DEFAULT_PACKAGE_OPTIONS = {'default_strategy': {'strategy': 'SimplestChemenvStrategy',
                                                    'strategy_options': {'distance_cutoff': strategies_class_lookup['SimplestChemenvStrategy'].DEFAULT_DISTANCE_CUTOFF,
                                                                         'angle_cutoff': strategies_class_lookup['SimplestChemenvStrategy'].DEFAULT_ANGLE_CUTOFF,
                                                                         'additional_condition': strategies_class_lookup['SimplestChemenvStrategy'].DEFAULT_ADDITIONAL_CONDITION,
                                                                         'continuous_symmetry_measure_cutoff': strategies_class_lookup['SimplestChemenvStrategy'].DEFAULT_CONTINUOUS_SYMMETRY_MEASURE_CUTOFF}},
                               'default_max_distance_factor': 1.5
                               }

    def __init__(self, materials_project_configuration=None, package_options=None):
        """Initialize with optional Materials Project access and package options."""
        self.materials_project_configuration = materials_project_configuration
        if package_options is None:
            self.package_options = self.DEFAULT_PACKAGE_OPTIONS
        else:
            self.package_options = package_options

    def setup(self):
        """Interactive console loop to configure the chemenv package."""
        while True:
            print('\n=> Configuration of the ChemEnv package <=')
            print('Current configuration :')
            if self.has_materials_project_access:
                print(' - Access to materials project is configured (add test ?)')
            else:
                print(' - No access to materials project')
            print(' - Package options :')
            for key, val in self.package_options.items():
                print(' {} : {}'.format(str(key), str(val)))
            print('\nChoose in the following :')
            print(' <1> + <ENTER> : setup of the access to the materials project database')
            print(' <2> + <ENTER> : configuration of the package options (strategy, ...)')
            print(' <q> + <ENTER> : quit without saving configuration')
            test = raw_input(' <S> + <ENTER> : save configuration and quit\n ... ')
            if test == '1':
                self.setup_materials_project_configuration()
            elif test == '2':
                self.setup_package_options()
            elif test == 'q':
                break
            elif test == 'S':
                config_file = self.save()
                break
            else:
                print(' ... wrong key, try again ...')
            print('')
        # config_file is only bound on the 'S' branch, so this is safe.
        if test == 'S':
            print('Configuration has been saved to file "{}"'.format(config_file))

    def setup_materials_project_configuration(self):
        """Ask for and store the Materials Project API key."""
        api_key = raw_input('\nEnter your Materials Project API key : ')
        self.materials_project_configuration = {'api_key': api_key}

    @property
    def has_materials_project_access(self):
        # True once an API key has been configured.
        return self.materials_project_configuration is not None

    def setup_package_options(self):
        """Interactively choose the default strategy and its options."""
        self.package_options = self.DEFAULT_PACKAGE_OPTIONS
        print('Choose between the following strategies : ')
        strategies = list(strategies_class_lookup.keys())
        for istrategy, strategy in enumerate(strategies):
            print(' <{}> : {}'.format(str(istrategy + 1), strategy))
        test = raw_input(' ... ')
        # User input is 1-based.
        self.package_options['default_strategy'] = {'strategy': strategies[int(test) - 1], 'strategy_options': {}}
        strategy_class = strategies_class_lookup[strategies[int(test) - 1]]
        if len(strategy_class.STRATEGY_OPTIONS) > 0:
            for option, option_dict in strategy_class.STRATEGY_OPTIONS.items():
                while True:
                    print(' => Enter value for option "{}" '
                          '(<ENTER> for default = {})\n'.format(option,
                                                                str(option_dict['default'])))
                    print(' Valid options are :\n')
                    print(' {}'.format(option_dict['type'].allowed_values))
                    test = raw_input(' Your choice : ')
                    if test == '':
                        # Empty input selects the documented default value.
                        self.package_options['default_strategy']['strategy_options'][option] = option_dict['type'](strategy_class.STRATEGY_OPTIONS[option]['default'])
                        break
                    try:
                        self.package_options['default_strategy']['strategy_options'][option] = option_dict['type'](test)
                        break
                    except ValueError:
                        print('Wrong input for option {}'.format(option))

    def package_options_description(self):
        """Return a human-readable summary of the current package options."""
        out = 'Package options :\n'
        out += ' - Maximum distance factor : {:.4f}\n'.format(self.package_options['default_max_distance_factor'])
        out += ' - Default strategy is "{}" :\n'.format(self.package_options['default_strategy']['strategy'])
        strategy_class = strategies_class_lookup[self.package_options['default_strategy']['strategy']]
        out += '{}\n'.format(strategy_class.STRATEGY_DESCRIPTION)
        out += ' with options :\n'
        for option, option_dict in strategy_class.STRATEGY_OPTIONS.items():
            out += ' - {} : {}\n'.format(option,
                                         self.package_options['default_strategy']['strategy_options'][option])
        return out

    def save(self, root_dir=None):
        """Save the configuration to <root_dir>/config.json (default ~/.chemenv).

        Asks for confirmation before overwriting an existing file.
        Returns the path of the configuration file.
        """
        if root_dir is None:
            home = expanduser("~")
            root_dir = '{}/.chemenv'.format(home)
        if not exists(root_dir):
            makedirs(root_dir)
        config_dict = {'materials_project_configuration': self.materials_project_configuration,
                       'package_options': self.package_options}
        config_file = '{}/config.json'.format(root_dir)
        if exists(config_file):
            test = raw_input('Overwrite existing configuration ? (<Y> + <ENTER> to confirm)')
            if test != 'Y':
                print('Configuration not saved')
                return config_file
        f = open(config_file, 'w')
        json.dump(config_dict, f)
        f.close()
        print('Configuration saved')
        return config_file

    @classmethod
    def auto_load(cls, root_dir=None):
        """Load the configuration from <root_dir>/config.json, or defaults."""
        if root_dir is None:
            home = expanduser("~")
            root_dir = '{}/.chemenv'.format(home)
        config_file = '{}/config.json'.format(root_dir)
        try:
            f = open(config_file, 'r')
            config_dict = json.load(f)
            f.close()
            return ChemEnvConfig(materials_project_configuration=config_dict['materials_project_configuration'],
                                 package_options=config_dict['package_options'])
        except IOError:
            # A missing or unreadable file falls back to the defaults.
            print('Unable to load configuration from file "{}" ...'.format(config_file))
            print(' ... loading default configuration')
            return ChemEnvConfig()

    @property
    def materials_project_api_key(self):
        # Raises ChemenvError when no API key has been configured.
        if self.materials_project_configuration is None:
            raise ChemenvError('ChemEnvConfig', 'materials_project_api_key', 'No api_key saved')
        return self.materials_project_configuration['api_key']
|
tallakahath/pymatgen
|
pymatgen/analysis/chemenv/utils/chemenv_config.py
|
Python
|
mit
| 8,193
|
[
"pymatgen"
] |
d9fc4f0b024ed9b59cea899c290919b639f28df23e5ca1ffde30905022f37cac
|
# NOTE: Python 2 script (print statements, raw_input).
from instagram.client import InstagramAPI
import sys

# Passing "local" as the first argument points the client at the test
# endpoints defined in a local config module (silently ignored if the
# module or its attributes are missing).
if len(sys.argv) > 1 and sys.argv[1] == 'local':
    try:
        from config import *
        #InstagramAPI.host = test_host
        #InstagramAPI.base_path = test_base_path
        InstagramAPI.access_token_field = "access_token"
        InstagramAPI.authorize_url = test_authorize_url
        InstagramAPI.access_token_url = test_access_token_url
        InstagramAPI.protocol = test_protocol
    except Exception:
        pass

# Collect the OAuth application credentials interactively.
client_id = raw_input("Client ID: ").strip()
client_secret = raw_input("Client Secret: ").strip()
redirect_uri = raw_input("Redirect URI: ").strip()
raw_scope = raw_input("Requested scope (separated by spaces, blank for just basic read): ").strip()
scope = raw_scope.split(' ')
# For basic, API seems to need to be set explicitly
if not scope or scope == [""]:
    scope = ["basic"]

# OAuth 2 authorization-code flow: visit the authorize URL in a browser,
# then exchange the returned code for an access token.
api = InstagramAPI(client_id=client_id, client_secret=client_secret, redirect_uri=redirect_uri)
redirect_uri = api.get_authorize_login_url(scope = scope)

print "Visit this page and authorize access in your browser:\n", redirect_uri

code = raw_input("Paste in code in query string after redirect: ").strip()
access_token = api.exchange_code_for_access_token(code)

print "access token:\n", access_token
|
haukurk/Partify
|
other/get_instagram_access_token.py
|
Python
|
mit
| 1,274
|
[
"VisIt"
] |
13679e9ac927d7d3d11362072ba71387fddce5e7ab6c97d600a11737faa8e575
|
#!/usr/bin/env python
# Copyright 2010-2013 by Peter Cock.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
r"""Read and write BGZF compressed files (the GZIP variant used in BAM).
The SAM/BAM file format (Sequence Alignment/Map) comes in a plain text
format (SAM), and a compressed binary format (BAM). The latter uses a
modified form of gzip compression called BGZF (Blocked GNU Zip Format),
which can be applied to any file format to provide compression with
efficient random access. BGZF is described together with the SAM/BAM
file format at http://samtools.sourceforge.net/SAM1.pdf
Please read the text below about 'virtual offsets' before using BGZF
files for random access.
Aim of this module
------------------
The Python gzip library can be used to read BGZF files, since for
decompression they are just (specialised) gzip files. What this
module aims to facilitate is random access to BGZF files (using the
'virtual offset' idea), and writing BGZF files (which means using
suitably sized gzip blocks and writing the extra 'BC' field in the
gzip headers). As in the gzip library, the zlib library is used
internally.
In addition to being required for random access to and writing of
BAM files, the BGZF format can also be used on other sequential
data (in the sense of one record after another), such as most of
the sequence data formats supported in Bio.SeqIO (like FASTA,
FASTQ, GenBank, etc) or large MAF alignments.
The Bio.SeqIO indexing functions use this module to support BGZF files.
Technical Introduction to BGZF
------------------------------
The gzip file format allows multiple compressed blocks, each of which
could be a stand alone gzip file. As an interesting bonus, this means
you can use Unix "cat" to combine two gzip files into one by
concatenating them. Also, each block can have one of several compression
levels (including uncompressed, which actually takes up a little bit
more space due to the gzip header).
What the BAM designers realised was that while random access to data
stored in traditional gzip files was slow, breaking the file into
gzip blocks would allow fast random access to each block. To access
a particular piece of the decompressed data, you just need to know
which block it starts in (the offset of the gzip block start), and
how far into the (decompressed) contents of the block you need to
read.
One problem with this is finding the gzip block sizes efficiently.
You can do it with a standard gzip file, but it requires every block
to be decompressed -- and that would be rather slow. Additionally
typical gzip files may use very large blocks.
All that differs in BGZF is that compressed size of each gzip block
is limited to 2^16 bytes, and an extra 'BC' field in the gzip header
records this size. Traditional decompression tools can ignore this,
and unzip the file just like any other gzip file.
The point of this is you can look at the first BGZF block, find out
how big it is from this 'BC' header, and thus seek immediately to
the second block, and so on.
The BAM indexing scheme records read positions using a 64 bit
'virtual offset', comprising coffset << 16 | uoffset, where coffset
is the file offset of the BGZF block containing the start of the read
(unsigned integer using up to 64-16 = 48 bits), and uoffset is the
offset within the (decompressed) block (unsigned 16 bit integer).
This limits you to BAM files where the last block starts by 2^48
bytes, or 256 petabytes, and the decompressed size of each block
is at most 2^16 bytes, or 64kb. Note that this matches the BGZF
'BC' field size which limits the compressed size of each block to
2^16 bytes, allowing for BAM files to use BGZF with no gzip
compression (useful for intermediate files in memory to reduce
CPU load).
Warning about namespaces
------------------------
It is considered a bad idea to use "from XXX import ``*``" in Python, because
it pollutes the namespace. This is a real issue with Bio.bgzf (and the
standard Python library gzip) because they contain a function called open
i.e. Suppose you do this:
>>> from Bio.bgzf import *
>>> print(open.__module__)
Bio.bgzf
Or,
>>> from gzip import *
>>> print(open.__module__)
gzip
Notice that the open function has been replaced. You can "fix" this if you
need to by importing the built-in open function:
>>> try:
... from __builtin__ import open # Python 2
... except ImportError:
... from builtins import open # Python 3
...
However, what we recommend instead is to use the explicit namespace, e.g.
>>> from Bio import bgzf
>>> print(bgzf.open.__module__)
Bio.bgzf
Example
-------
This is an ordinary GenBank file compressed using BGZF, so it can
be decompressed using gzip,
>>> import gzip
>>> handle = gzip.open("GenBank/NC_000932.gb.bgz", "r")
>>> assert 0 == handle.tell()
>>> line = handle.readline()
>>> assert 80 == handle.tell()
>>> line = handle.readline()
>>> assert 143 == handle.tell()
>>> data = handle.read(70000)
>>> assert 70143 == handle.tell()
>>> handle.close()
We can also access the file using the BGZF reader - but pay
attention to the file offsets which will be explained below:
>>> handle = BgzfReader("GenBank/NC_000932.gb.bgz", "r")
>>> assert 0 == handle.tell()
>>> print(handle.readline().rstrip())
LOCUS NC_000932 154478 bp DNA circular PLN 15-APR-2009
>>> assert 80 == handle.tell()
>>> print(handle.readline().rstrip())
DEFINITION Arabidopsis thaliana chloroplast, complete genome.
>>> assert 143 == handle.tell()
>>> data = handle.read(70000)
>>> assert 987828735 == handle.tell()
>>> print(handle.readline().rstrip())
f="GeneID:844718"
>>> print(handle.readline().rstrip())
CDS complement(join(84337..84771,85454..85843))
>>> offset = handle.seek(make_virtual_offset(55074, 126))
>>> print(handle.readline().rstrip())
68521 tatgtcattc gaaattgtat aaagacaact cctatttaat agagctattt gtgcaagtat
>>> handle.close()
Notice the handle's offset looks different as a BGZF file. This
brings us to the key point about BGZF, which is the block structure:
>>> handle = open("GenBank/NC_000932.gb.bgz", "rb")
>>> for values in BgzfBlocks(handle):
... print("Raw start %i, raw length %i; data start %i, data length %i" % values)
Raw start 0, raw length 15073; data start 0, data length 65536
Raw start 15073, raw length 17857; data start 65536, data length 65536
Raw start 32930, raw length 22144; data start 131072, data length 65536
Raw start 55074, raw length 22230; data start 196608, data length 65536
Raw start 77304, raw length 14939; data start 262144, data length 43478
Raw start 92243, raw length 28; data start 305622, data length 0
>>> handle.close()
In this example the first three blocks are 'full' and hold 65536 bytes
of uncompressed data. The fourth block isn't full and holds 43478 bytes.
Finally there is a special empty fifth block which takes 28 bytes on
disk and serves as an 'end of file' (EOF) marker. If this is missing,
it is possible your BGZF file is incomplete.
By reading ahead 70,000 bytes we moved into the second BGZF block,
and at that point the BGZF virtual offsets start to look different
to a simple offset into the decompressed data as exposed by the gzip
library.
As an example, consider seeking to the decompressed position 196734.
Since 196734 = 65536 + 65536 + 65536 + 126 = 65536*3 + 126, this
is equivalent to jumping the first three blocks (which in this
specific example are all size 65536 after decompression - which
does not always hold) and starting at byte 126 of the fourth block
(after decompression). For BGZF, we need to know the fourth block's
offset of 55074 and the offset within the block of 126 to get the
BGZF virtual offset.
>>> print(55074 << 16 | 126)
3609329790
>>> print(bgzf.make_virtual_offset(55074, 126))
3609329790
Thus for this BGZF file, decompressed position 196734 corresponds
to the virtual offset 3609329790. However, another BGZF file with
different contents would have compressed more or less efficiently,
so the compressed blocks would be different sizes. What this means
is the mapping between the uncompressed offset and the compressed
virtual offset depends on the BGZF file you are using.
If you are accessing a BGZF file via this module, just use the
handle.tell() method to note the virtual offset of a position you
may later want to return to using handle.seek().
The catch with BGZF virtual offsets is while they can be compared
(which offset comes first in the file), you cannot safely subtract
them to get the size of the data between them, nor add/subtract
a relative offset.
Of course you can parse this file with Bio.SeqIO using BgzfReader,
although there isn't any benefit over using gzip.open(...), unless
you want to index BGZF compressed sequence files:
>>> from Bio import SeqIO
>>> handle = BgzfReader("GenBank/NC_000932.gb.bgz")
>>> record = SeqIO.read(handle, "genbank")
>>> handle.close()
>>> print(record.id)
NC_000932.1
"""
from __future__ import print_function
import sys # to detect when under Python 2
import zlib
import struct
from Bio._py3k import _as_bytes, _as_string
from Bio._py3k import open as _open
__docformat__ = "restructuredtext en"
# For Python 2 can just use: _bgzf_magic = '\x1f\x8b\x08\x04'
# but need to use bytes on Python 3
_bgzf_magic = b"\x1f\x8b\x08\x04"
_bgzf_header = b"\x1f\x8b\x08\x04\x00\x00\x00\x00\x00\xff\x06\x00\x42\x43\x02\x00"
_bgzf_eof = b"\x1f\x8b\x08\x04\x00\x00\x00\x00\x00\xff\x06\x00BC\x02\x00\x1b\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00"
_bytes_BC = b"BC"
def open(filename, mode="rb"):
    """Open a BGZF file for reading, writing or appending.

    Dispatches on the mode string: any read mode returns a BgzfReader,
    any write or append mode returns a BgzfWriter, and anything else
    raises ValueError.
    """
    lowered = mode.lower()
    if "r" in lowered:
        return BgzfReader(filename, mode)
    if "w" in lowered or "a" in lowered:
        return BgzfWriter(filename, mode)
    raise ValueError("Bad mode %r" % mode)
def make_virtual_offset(block_start_offset, within_block_offset):
    """Compute a BGZF virtual offset from block start and within block offsets.

    A BGZF (BAM indexing scheme) virtual offset is a 64 bit unsigned
    integer packing the compressed-file offset of the block start into
    the high 48 bits and the offset within the decompressed block into
    the low 16 bits:

        block_start_offset << 16 | within_block_offset

    Raises ValueError if either component is out of range.

    >>> make_virtual_offset(0, 0)
    0
    >>> make_virtual_offset(1, 1)
    65537
    >>> make_virtual_offset(100000, 10)
    6553600010
    """
    if not (0 <= within_block_offset < 2 ** 16):
        raise ValueError("Require 0 <= within_block_offset < 2**16, got %i" % within_block_offset)
    if not (0 <= block_start_offset < 2 ** 48):
        raise ValueError("Require 0 <= block_start_offset < 2**48, got %i" % block_start_offset)
    return (block_start_offset << 16) | within_block_offset
def split_virtual_offset(virtual_offset):
    """Divides a 64-bit BGZF virtual offset into block start & within block offsets.

    Inverse of make_virtual_offset: the high 48 bits give the block
    start offset in the compressed file, the low 16 bits give the
    offset within the decompressed block.

    >>> split_virtual_offset(6553600000)
    (100000, 0)
    >>> split_virtual_offset(6553600010)
    (100000, 10)
    """
    block_start = virtual_offset >> 16
    within_block = virtual_offset & 0xFFFF
    return block_start, within_block
def BgzfBlocks(handle):
    """Low level debugging function to inspect BGZF blocks.

    Expects a BGZF compressed file opened in binary read mode using
    the builtin open function. Do not use a handle from this bgzf
    module or the gzip module's open function which will decompress
    the file.

    Yields one tuple per BGZF block: the block start offset (see
    virtual offsets), the block length (add these for the start of
    the next block), the decompressed data start offset, and the
    decompressed length of the block's contents (limited to 65536
    in BGZF).

    >>> try:
    ...     from __builtin__ import open # Python 2
    ... except ImportError:
    ...     from builtins import open # Python 3
    ...
    >>> handle = open("SamBam/ex1.bam", "rb")
    >>> for values in BgzfBlocks(handle):
    ...     print("Raw start %i, raw length %i; data start %i, data length %i" % values)
    Raw start 0, raw length 18239; data start 0, data length 65536
    Raw start 18239, raw length 18223; data start 65536, data length 65536
    Raw start 36462, raw length 18017; data start 131072, data length 65536
    Raw start 54479, raw length 17342; data start 196608, data length 65536
    Raw start 71821, raw length 17715; data start 262144, data length 65536
    Raw start 89536, raw length 17728; data start 327680, data length 65536
    Raw start 107264, raw length 17292; data start 393216, data length 63398
    Raw start 124556, raw length 28; data start 456614, data length 0
    >>> handle.close()

    Note the raw lengths depend on how well the data compressed, so
    files with identical contents written by different tools can show
    different block boundaries. The final 28 byte block is the dummy
    empty EOF marker block.
    """
    data_start = 0
    while True:
        start_offset = handle.tell()
        try:
            block_length, data = _load_bgzf_block(handle)
        except StopIteration:
            # _load_bgzf_block signals end of file by raising StopIteration.
            # It must be caught here: under PEP 479 (Python 3.7+) letting a
            # StopIteration propagate out of a generator body raises
            # RuntimeError instead of ending the iteration cleanly.
            return
        data_len = len(data)
        yield start_offset, block_length, data_start, data_len
        data_start += data_len
def _load_bgzf_block(handle, text_mode=False):
    """Internal function to load the next BGZF block (PRIVATE).

    The handle must be positioned at the start of a BGZF block (a gzip
    member with a BC extra subfield). Returns a two-tuple of the raw
    (compressed) block size in bytes, and the decompressed contents
    (as a string in text_mode, otherwise as bytes).

    Raises StopIteration at end of file, and ValueError if the data
    does not start with the BGZF magic bytes.
    """
    # The fixed gzip header: 4 magic bytes, then mod time (4), extra
    # flags (1), OS (1) and the length of the extra field (2).
    magic = handle.read(4)
    if not magic:
        # End of file
        raise StopIteration
    if magic != _bgzf_magic:
        raise ValueError(r"A BGZF (e.g. a BAM file) block should start with "
                         r"%r, not %r; handle.tell() now says %r"
                         % (_bgzf_magic, magic, handle.tell()))
    gzip_mod_time, gzip_extra_flags, gzip_os, extra_len = \
        struct.unpack("<LBBH", handle.read(8))
    # Walk the gzip extra subfields looking for the mandatory BC
    # subfield, which stores the total block size minus one (BSIZE).
    block_size = None
    x_len = 0
    while x_len < extra_len:
        subfield_id = handle.read(2)
        subfield_len = struct.unpack("<H", handle.read(2))[0]  # uint16_t
        subfield_data = handle.read(subfield_len)
        x_len += subfield_len + 4
        if subfield_id == _bytes_BC:
            assert subfield_len == 2, "Wrong BC payload length"
            assert block_size is None, "Two BC subfields?"
            block_size = struct.unpack("<H", subfield_data)[0] + 1  # uint16_t
    assert x_len == extra_len, (x_len, extra_len)
    assert block_size is not None, "Missing BC, this isn't a BGZF file!"
    # Now comes the compressed data, CRC, and length of uncompressed data.
    # The deflate payload is the block size minus the 12 byte fixed gzip
    # header, the extra field, and the 8 byte CRC/ISIZE footer
    # (12 + 8 = 20 = 1 + 19, the 1 matching BSIZE's "minus one" encoding).
    deflate_size = block_size - 1 - extra_len - 19
    d = zlib.decompressobj(-15)  # Negative window size means no headers
    data = d.decompress(handle.read(deflate_size)) + d.flush()
    expected_crc = handle.read(4)
    expected_size = struct.unpack("<I", handle.read(4))[0]
    assert expected_size == len(data), \
        "Decompressed to %i, not %i" % (len(data), expected_size)
    # Should cope with a mix of Python platforms...
    # (zlib.crc32 can return a signed value, so pack with the matching
    # signed/unsigned format to compare against the stored footer bytes)
    crc = zlib.crc32(data)
    if crc < 0:
        crc = struct.pack("<i", crc)
    else:
        crc = struct.pack("<I", crc)
    assert expected_crc == crc, \
        "CRC is %s, not %s" % (crc, expected_crc)
    if text_mode:
        # _as_string (from Bio._py3k) converts the bytes to str for
        # text mode handles.
        return block_size, _as_string(data)
    else:
        return block_size, data
class BgzfReader(object):
    r"""BGZF reader, acts like a read only handle but seek/tell differ.

    Let's use the BgzfBlocks function to have a peek at the BGZF blocks
    in an example BAM file,

    >>> try:
    ...     from __builtin__ import open # Python 2
    ... except ImportError:
    ...     from builtins import open # Python 3
    ...
    >>> handle = open("SamBam/ex1.bam", "rb")
    >>> for values in BgzfBlocks(handle):
    ...     print("Raw start %i, raw length %i; data start %i, data length %i" % values)
    Raw start 0, raw length 18239; data start 0, data length 65536
    Raw start 18239, raw length 18223; data start 65536, data length 65536
    Raw start 36462, raw length 18017; data start 131072, data length 65536
    Raw start 54479, raw length 17342; data start 196608, data length 65536
    Raw start 71821, raw length 17715; data start 262144, data length 65536
    Raw start 89536, raw length 17728; data start 327680, data length 65536
    Raw start 107264, raw length 17292; data start 393216, data length 63398
    Raw start 124556, raw length 28; data start 456614, data length 0
    >>> handle.close()

    Now let's see how to use this block information to jump to
    specific parts of the decompressed BAM file:

    >>> handle = BgzfReader("SamBam/ex1.bam", "rb")
    >>> assert 0 == handle.tell()
    >>> magic = handle.read(4)
    >>> assert 4 == handle.tell()

    So far nothing so strange, we got the magic marker used at the
    start of a decompressed BAM file, and the handle position makes
    sense. Now however, let's jump to the end of this block and 4
    bytes into the next block by reading 65536 bytes,

    >>> data = handle.read(65536)
    >>> len(data)
    65536
    >>> assert 1195311108 == handle.tell()

    Expecting 4 + 65536 = 65540 were you? Well this is a BGZF 64-bit
    virtual offset, which means:

    >>> split_virtual_offset(1195311108)
    (18239, 4)

    You should spot 18239 as the start of the second BGZF block, while
    the 4 is the offset into this block. See also make_virtual_offset,

    >>> make_virtual_offset(18239, 4)
    1195311108

    Let's jump back to almost the start of the file,

    >>> make_virtual_offset(0, 2)
    2
    >>> handle.seek(2)
    2
    >>> handle.close()

    Note that you can use the max_cache argument to limit the number of
    BGZF blocks cached in memory. The default is 100, and since each
    block can be up to 64kb, the default cache could take up to 6MB of
    RAM. The cache is not important for reading through the file in one
    pass, but is important for improving performance of random access.
    """
    def __init__(self, filename=None, mode="r", fileobj=None, max_cache=100):
        """Initialise from a filename or an already opened binary handle.

        Exactly one of filename and fileobj should be given; mode "r"
        gives text output while "rb" gives bytes; max_cache bounds the
        number of decompressed blocks held in memory.
        """
        # TODO - Assuming we can seek, check for 28 bytes EOF empty block
        # and if missing warn about possible truncation (as in samtools)?
        if max_cache < 1:
            raise ValueError("Use max_cache with a minimum of 1")
        # Must open the BGZF file in binary mode, but we may want to
        # treat the contents as either text or binary (unicode or
        # bytes under Python 3)
        if fileobj:
            assert filename is None
            handle = fileobj
            assert "b" in handle.mode.lower()
        else:
            if "w" in mode.lower() \
                    or "a" in mode.lower():
                raise ValueError("Must use read mode (default), not write or append mode")
            handle = _open(filename, "rb")
        self._text = "b" not in mode.lower()
        if self._text:
            self._newline = "\n"
        else:
            self._newline = b"\n"
        self._handle = handle
        self.max_cache = max_cache
        # Cache of decompressed blocks: start offset -> (data, raw length)
        self._buffers = {}
        self._block_start_offset = None
        self._block_raw_length = None
        # Load the first block immediately so tell/read work at once.
        self._load_block(handle.tell())
    def _load_block(self, start_offset=None):
        """Make the block at the given raw file offset current (PRIVATE).

        With no argument, loads the block following the current one.
        Serves cached blocks where possible; resets the within-block
        offset to zero unless the requested block is already current.
        """
        if start_offset is None:
            # If the file is being read sequentially, then _handle.tell()
            # should be pointing at the start of the next block.
            # However, if seek has been used, we can't assume that.
            start_offset = self._block_start_offset + self._block_raw_length
        if start_offset == self._block_start_offset:
            self._within_block_offset = 0
            return
        elif start_offset in self._buffers:
            # Already in cache
            self._buffer, self._block_raw_length = self._buffers[start_offset]
            self._within_block_offset = 0
            self._block_start_offset = start_offset
            return
        # Must hit the disk... first check cache limits,
        while len(self._buffers) >= self.max_cache:
            # TODO - Implement LRU cache removal?
            # (dict.popitem evicts an arbitrary entry, not the oldest)
            self._buffers.popitem()
        # Now load the block
        handle = self._handle
        if start_offset is not None:
            handle.seek(start_offset)
        self._block_start_offset = handle.tell()
        try:
            block_size, self._buffer = _load_bgzf_block(handle, self._text)
        except StopIteration:
            # EOF
            block_size = 0
            if self._text:
                self._buffer = ""
            else:
                self._buffer = b""
        self._within_block_offset = 0
        self._block_raw_length = block_size
        # Finally save the block in our cache,
        self._buffers[self._block_start_offset] = self._buffer, block_size
    def tell(self):
        """Returns a 64-bit unsigned BGZF virtual offset."""
        if 0 < self._within_block_offset == len(self._buffer):
            # Special case where we're right at the end of a (non empty) block.
            # For non-maximal blocks could give two possible virtual offsets,
            # but for a maximal block can't use 65536 as the within block
            # offset. Therefore for consistency, use the next block and a
            # within block offset of zero.
            return (self._block_start_offset + self._block_raw_length) << 16
        else:
            # return make_virtual_offset(self._block_start_offset,
            #                            self._within_block_offset)
            # TODO - Include bounds checking as in make_virtual_offset?
            return (self._block_start_offset << 16) | self._within_block_offset
    def seek(self, virtual_offset):
        """Seek to a 64-bit unsigned BGZF virtual offset."""
        # Do this inline to avoid a function call,
        # start_offset, within_block = split_virtual_offset(virtual_offset)
        start_offset = virtual_offset >> 16
        within_block = virtual_offset ^ (start_offset << 16)
        if start_offset != self._block_start_offset:
            # Don't need to load the block if already there
            # (this avoids a function call since _load_block would do nothing)
            self._load_block(start_offset)
            assert start_offset == self._block_start_offset
        if within_block > len(self._buffer) \
                and not (within_block == 0 and len(self._buffer)==0):
            raise ValueError("Within offset %i but block size only %i"
                             % (within_block, len(self._buffer)))
        self._within_block_offset = within_block
        # assert virtual_offset == self.tell(), \
        #    "Did seek to %i (%i, %i), but tell says %i (%i, %i)" \
        #    % (virtual_offset, start_offset, within_block,
        #       self.tell(), self._block_start_offset, self._within_block_offset)
        return virtual_offset
    def read(self, size=-1):
        """Read and return up to size bytes (or characters in text mode).

        Unlike a normal handle, a size must be given; reading the whole
        file in one call is deliberately not supported.
        """
        if size < 0:
            raise NotImplementedError("Don't be greedy, that could be massive!")
        elif size == 0:
            if self._text:
                return ""
            else:
                return b""
        elif self._within_block_offset + size <= len(self._buffer):
            # This may leave us right at the end of a block
            # (lazy loading, don't load the next block unless we have to)
            data = self._buffer[self._within_block_offset:self._within_block_offset + size]
            self._within_block_offset += size
            assert data  # Must be at least 1 byte
            return data
        else:
            # Request spans the end of the current block: take what is
            # left, then recurse into the following block(s).
            data = self._buffer[self._within_block_offset:]
            size -= len(data)
            self._load_block()  # will reset offsets
            # TODO - Test with corner case of an empty block followed by
            # a non-empty block
            if not self._buffer:
                return data  # EOF
            elif size:
                # TODO - Avoid recursion
                return data + self.read(size)
            else:
                # Only needed the end of the last block
                return data
    def readline(self):
        """Read and return one line (up to and including the newline)."""
        i = self._buffer.find(self._newline, self._within_block_offset)
        # Three cases to consider,
        if i==-1:
            # No newline, need to read in more data
            data = self._buffer[self._within_block_offset:]
            self._load_block()  # will reset offsets
            if not self._buffer:
                return data  # EOF
            else:
                # TODO - Avoid recursion
                return data + self.readline()
        elif i + 1 == len(self._buffer):
            # Found new line, but right at end of block (SPECIAL)
            data = self._buffer[self._within_block_offset:]
            # Must now load the next block to ensure tell() works
            self._load_block()  # will reset offsets
            assert data
            return data
        else:
            # Found new line, not at end of block (easy case, no IO)
            data = self._buffer[self._within_block_offset:i + 1]
            self._within_block_offset = i + 1
            # assert data.endswith(self._newline)
            return data
    def __next__(self):
        """Return the next line, raising StopIteration at end of file."""
        line = self.readline()
        if not line:
            raise StopIteration
        return line
    if sys.version_info[0] < 3:
        def next(self):
            """Python 2 style alias for Python 3 style __next__ method."""
            return self.__next__()
    def __iter__(self):
        """Iterate over the lines of the decompressed file."""
        return self
    def close(self):
        """Close the underlying handle and drop all cached blocks."""
        self._handle.close()
        self._buffer = None
        self._block_start_offset = None
        self._buffers = None
    def seekable(self):
        """Return True; BGZF supports random access via virtual offsets."""
        return True
    def isatty(self):
        """Return False; a BGZF file is never an interactive terminal."""
        return False
    def fileno(self):
        """Return the file descriptor of the underlying handle."""
        return self._handle.fileno()
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        # Context manager protocol: close the handle on leaving the block.
        self.close()
class BgzfWriter(object):
    """BGZF writer, acts like a write only handle.

    Data written is buffered and compressed into a series of BGZF
    blocks, each holding at most 64KiB of uncompressed data. Use
    close() (or the context manager protocol) to ensure the final
    partial block and the 28 byte empty BGZF EOF marker block are
    written out, as expected by samtools.
    """

    def __init__(self, filename=None, mode="w", fileobj=None, compresslevel=6):
        """Initialise from a filename (write/append mode) or a binary handle.

        Exactly one of filename and fileobj should be given. The zlib
        compresslevel (default 6) is used for each block.
        """
        if fileobj:
            assert filename is None
            handle = fileobj
        else:
            if "w" not in mode.lower() \
                    and "a" not in mode.lower():
                raise ValueError("Must use write or append mode, not %r" % mode)
            if "a" in mode.lower():
                handle = _open(filename, "ab")
            else:
                handle = _open(filename, "wb")
        self._text = "b" not in mode.lower()
        self._handle = handle
        # Pending uncompressed data; write() keeps this below 64KiB.
        self._buffer = b""
        self.compresslevel = compresslevel

    def _write_block(self, block):
        """Compress block (bytes, at most 64KiB) and write one BGZF block (PRIVATE)."""
        assert len(block) <= 65536
        # Giving a negative window bits means no gzip/zlib headers, -15 used in samtools
        c = zlib.compressobj(self.compresslevel,
                             zlib.DEFLATED,
                             -15,
                             zlib.DEF_MEM_LEVEL,
                             0)
        compressed = c.compress(block) + c.flush()
        del c
        assert len(compressed) < 65536, "TODO - Didn't compress enough, try less data in this block"
        # Mask the CRC to an unsigned 32 bit value for consistent packing
        # across Python platforms (zlib.crc32 may return a signed value).
        # This replaces an earlier signed/unsigned if/else whose result was
        # immediately overwritten, i.e. dead code.
        crc = struct.pack("<I", zlib.crc32(block) & 0xffffffff)
        # BSIZE is the total block size minus one: the fixed header (16),
        # BSIZE itself (2) and the CRC/ISIZE footer (8) add 26 bytes to
        # the compressed payload, hence +25 here.
        bsize = struct.pack("<H", len(compressed) + 25)  # includes -1
        uncompressed_length = struct.pack("<I", len(block))
        # Fixed 16 bytes,
        # gzip magic bytes (4) mod time (4),
        # gzip flag (1), os (1), extra length which is six (2),
        # sub field which is BC (2), sub field length of two (2),
        # Variable data,
        # 2 bytes: block length as BC sub field (2)
        # X bytes: the data
        # 8 bytes: crc (4), uncompressed data length (4)
        data = _bgzf_header + bsize + compressed + crc + uncompressed_length
        self._handle.write(data)

    def write(self, data):
        """Buffer data, writing out complete 64KiB blocks as they fill."""
        # TODO - Check bytes vs unicode
        data = _as_bytes(data)
        # block_size = 2**16 = 65536
        data_len = len(data)
        if len(self._buffer) + data_len < 65536:
            # Not enough for a full block yet, just cache it.
            self._buffer += data
            return
        else:
            self._buffer += data
            while len(self._buffer) >= 65536:
                self._write_block(self._buffer[:65536])
                self._buffer = self._buffer[65536:]

    def flush(self):
        """Compress and write out any buffered data (as one or more blocks)."""
        # Use the same 65536 byte block size as write(); the previous code
        # sliced 65535 bytes here, needlessly splitting a full 64KiB buffer
        # into a 65535 byte block plus a tiny left-over block.
        while len(self._buffer) >= 65536:
            self._write_block(self._buffer[:65536])
            self._buffer = self._buffer[65536:]
        self._write_block(self._buffer)
        self._buffer = b""
        self._handle.flush()

    def close(self):
        """Flush data, write 28 bytes empty BGZF EOF marker, and close the BGZF file."""
        if self._buffer:
            self.flush()
        # samtools will look for a magic EOF marker, just a 28 byte empty BGZF block,
        # and if it is missing warns the BAM file may be truncated. In addition to
        # samtools writing this block, so too does bgzip - so we should too.
        self._handle.write(_bgzf_eof)
        self._handle.flush()
        self._handle.close()

    def tell(self):
        """Returns a BGZF 64-bit virtual offset."""
        return make_virtual_offset(self._handle.tell(), len(self._buffer))

    def seekable(self):
        # Not seekable, but we do support tell...
        return False

    def isatty(self):
        """Return False; a BGZF file is never an interactive terminal."""
        return False

    def fileno(self):
        """Return the file descriptor of the underlying handle."""
        return self._handle.fileno()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Context manager protocol: close (writing the EOF marker) on exit.
        self.close()
if __name__ == "__main__":
    # Simple command line filter: compress stdin to BGZF on stdout.
    import sys
    if len(sys.argv) > 1:
        # Any argument at all triggers the usage message.
        print("Call this with no arguments and pipe uncompressed data in on stdin")
        print("and it will produce BGZF compressed data on stdout. e.g.")
        print("")
        print("./bgzf.py < example.fastq > example.fastq.bgz")
        print("")
        print("The extension convention of *.bgz is to distinguish these from *.gz")
        print("used for standard gzipped files without the block structure of BGZF.")
        print("You can use the standard gunzip command to decompress BGZF files,")
        print("if it complains about the extension try something like this:")
        print("")
        print("cat example.fastq.bgz | gunzip > example.fastq")
        print("")
        print("See also the tool bgzip that comes with samtools")
        sys.exit(0)
    sys.stderr.write("Producing BGZF output from stdin...\n")
    # NOTE(review): under Python 3 this would need sys.stdout.buffer for a
    # binary stream; this build targets Python 2 where stdout accepts bytes.
    w = BgzfWriter(fileobj=sys.stdout)
    while True:
        data = sys.stdin.read(65536)
        if not data:
            # EOF on stdin; previously an empty chunk was written before
            # breaking, which was a pointless (if harmless) call.
            break
        w.write(data)
    # Doing close will write an empty BGZF block as EOF marker:
    w.close()
    sys.stderr.write("BGZF data produced\n")
|
updownlife/multipleK
|
dependencies/biopython-1.65/build/lib.linux-x86_64-2.7/Bio/bgzf.py
|
Python
|
gpl-2.0
| 34,085
|
[
"Biopython"
] |
606c38c87cf818fd7b78232a021c0d9fc2ccc5f095d49338363997c6eb69f51b
|
#!/bin/env python
"""
tests for SSHComputingElement module
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import subprocess32 as subprocess
import shlex
import pytest
import DIRAC
from DIRAC.Resources.Computing.SSHComputingElement import SSHComputingElement
from DIRAC.Resources.Computing.BatchSystems.executeBatch import executeBatchContent
@pytest.mark.parametrize("batchSystem", ["Condor", "GE", "Host", "LSF", "OAR", "SLURM", "Torque"])
def test_generateControlScript(batchSystem):
    """Test that the control script generated by the merging operation
    between a BatchSystem and executeBatch.py is:
    * complete: contains the content of both files
    * executable and doesn't raise any syntax error.
    Example: it may check that a __future__ import is not misplaced in the script due to the
    merging of the files.
    """
    ce = SSHComputingElement("Test_SSHCE")
    # Change the batch system file used during the control script generation
    ce.loadBatchSystem(batchSystem)
    # Get the local control script
    result = ce._generateControlScript()
    # _generateControlScript returns a DIRAC S_OK/S_ERROR style dict.
    assert result["OK"] is True
    source = result["Value"]
    dest = "execute_batch.py"
    # Simulate operation done by the scpCall method
    # Copy the local control script into the "remote" control script
    # As the source can be composed of multiple files, we have to copy the content of each file
    # ("Value" is treated here as a space-separated list of local paths).
    sources = source.split(" ")
    with open(dest, "wb") as dst:
        for sourceFile in sources:
            with open(sourceFile, "rb") as src:
                shutil.copyfileobj(src, dst)
    # Test that the control script is complete: the merged file must embed
    # both the generic executeBatch content and the batch system module.
    with open(dest, "r") as dst:
        dataDest = dst.read()
    batchSystemDir = os.path.join(os.path.dirname(DIRAC.__file__), "Resources", "Computing", "BatchSystems")
    batchSystemScript = os.path.join(batchSystemDir, "%s.py" % batchSystem)
    with open(batchSystemScript, "r") as bsc:
        dataBatchSystemScript = bsc.read()
    assert executeBatchContent in dataDest
    assert dataBatchSystemScript in dataDest
    # Test the execution of the remote control script:
    # py_compile only checks the syntax; returncode 0 means it compiled.
    cmd = "python -m py_compile %s" % dest
    args = shlex.split(cmd)
    process = subprocess.Popen(args, universal_newlines=True)
    process.communicate()
    assert process.returncode == 0
    # Delete the control script and the .pyc file associated
    # NOTE(review): if "source" ever held multiple space-separated paths,
    # this os.remove(source) would fail; presumably a single merged script
    # path is returned here - confirm against _generateControlScript.
    os.remove(source)
    os.remove(dest)
    if os.path.isfile("%sc" % dest):
        os.remove("%sc" % dest)
|
ic-hep/DIRAC
|
src/DIRAC/Resources/Computing/test/Test_SSHComputingElement.py
|
Python
|
gpl-3.0
| 2,569
|
[
"DIRAC"
] |
7b3e4964b8852bffe1c7c4a56c08a671168903df577fcb6518ec15ed88ff94a3
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2021 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Module for writing input files to external codes."""
from psi4.driver import constants
from psi4 import core
def _write_nbo(self, name):
    """Write a GENNBO input file for this wavefunction to the file *name*.

    Dumps the geometry, basis set, AO overlap, density, Fock matrix and
    MO coefficients in the fixed-width format expected by the standalone
    NBO (GENNBO) program. Cartesian AOs are renormalized first (see the
    comment before the $OVERLAP section below).
    """
    basisset = self.basisset()
    mints = core.MintsHelper(basisset)
    mol = self.molecule()
    # Populate header and coordinates.
    NBO_file = f" $GENNBO NATOMS = {mol.natom()} NBAS = {basisset.nbf()} BODM "
    if self.nalpha() != self.nbeta():
        # Flag an open-shell (spin-unrestricted) wavefunction for NBO.
        NBO_file += f" OPEN"
    NBO_file += " $END\n $NBO $END\n $COORD\n"
    NBO_file += " GENNBO expects one comment line here. So, here's a comment line.\n"
    for atom in range(mol.natom()):
        # One line per atom: true Z, charge-adjusted Z, then Cartesian
        # coordinates converted from Bohr to Angstroms.
        NBO_file += f"{mol.true_atomic_number(atom):2d} {int(mol.Z(atom)):2d} {constants.bohr2angstroms * mol.x(atom):20.12f} {constants.bohr2angstroms * mol.y(atom):20.12f} {constants.bohr2angstroms * mol.z(atom):20.12f}\n"
    NBO_file += " $END\n"
    # Populate basis function information.
    # NBO's numeric labels for spherical (pure) functions, per shell AM.
    pure_order = [
        [1], # s
        [103, 101, 102], # p
        [255, 252, 253, 254, 251], # d: z2 xz yz x2-y2 xy
        [351, 352, 353, 354, 355, 356, 357], # f
        [451, 452, 453, 454, 455, 456, 457, 458, 459], #g
        [551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561] #h
    ]
    # For historical reasons, the code loops over shells first and then basis functions within the shell.
    # This turns out to not give the same ordering as looping over basis functions directly.
    NBO_file += " $BASIS\n"
    center_string = ""
    label_string = ""
    count = 0
    for i in range(basisset.nshell()):
        shell = basisset.shell(i)
        am = shell.am
        for j in range(shell.nfunction):
            if not (count % 10):
                # Ten entries per line; the first line carries the keyword.
                center_string += "\n CENTER =" if not i else "\n "
                label_string += "\n LABEL =" if not i else "\n "
            center_string += f" {shell.ncenter + 1:6d}"
            if basisset.has_puream():
                label = pure_order[am][j]
            else:
                # Cartesian labels are 100*AM + index within the shell.
                label = 100 * am + j + 1
            label_string += f" {label:6d}"
            count += 1
    NBO_file += center_string + label_string + "\n $END\n"
    # Populate contraction information. Start with exponents.
    NBO_file += f" $CONTRACT\n NSHELL = {basisset.nshell():6d}\n NEXP = {basisset.nprimitive():6d}\n"
    function_nums = ""
    prim_nums = ""
    prim_indices = ""
    exponents = ""
    prim_index = 0
    coefficients = [] # [(AM int, coefficient), ...]
    for i in range(basisset.nshell()):
        if not (i % 10):
            function_nums += "\n NCOMP =" if not i else "\n "
            prim_nums += "\n NPRIM =" if not i else "\n "
            prim_indices += "\n NPTR =" if not i else "\n "
        shell = basisset.shell(i)
        nprim = shell.nprimitive
        function_nums += f" {shell.nfunction:6d}"
        prim_nums += f" {nprim:6d}"
        # NPTR is 1-based: index of this shell's first primitive.
        prim_indices += f" {prim_index + 1:6d}"
        for j in range(nprim):
            if not (prim_index % 4):
                # Four exponents per line.
                exponents += "\n EXP =" if not prim_index else "\n "
            exponents += f"{shell.exp(j):15.6E}"
            prim_index += 1
            coefficients.append((shell.am, shell.coef(j)))
    NBO_file += function_nums + prim_nums + prim_indices + exponents
    # Populate contraction coefficients. Because some basis sets (Poples with S and P) use the same
    # coefficients for multiple angular momenta, we must supply coefficients for all primitives, for all
    # angular momenta. This leads to many zero elements.
    am_labels = ["S", "P", "D", "F", "G", "H"]
    for current_nbo_section_am in range(basisset.max_am() + 1):
        for i, (shell_am, coefficient) in enumerate(coefficients):
            if not (i % 4):
                NBO_file += f"\n C{am_labels[current_nbo_section_am]} =" if not i else "\n "
            if shell_am != current_nbo_section_am:
                # Zero out coefficients that belong to a different AM section.
                coefficient = 0
            NBO_file += f"{coefficient:15.6E}"
    NBO_file += "\n $END"
    # That finishes most of the basis information. Next is the overlap. It would be great if we could just dump Psi's AO
    # overlap matrix, but we can 't. Per CCA guidelines, Psi' s Cartesian d and higher AM AOs aren't normalized to 1.
    # While NBO can "fix" this itself, it changes other AO quantities to match and gets the Fock matrix wrong.
    # Let's normalize ourselves instead.
    ao_overlap = mints.ao_overlap().np
    nbf = ao_overlap.shape[0]
    # Per-AO normalization factors: 1/sqrt of the overlap diagonal.
    ao_normalizer = ao_overlap.diagonal()**(-1 / 2)
    def normalize(matrix, normalizer):
        # Scale rows and columns of a square AO matrix by the normalizer.
        return ((matrix * normalizer).T * normalizer).T
    normalized_ao_overlap = normalize(ao_overlap, ao_normalizer)
    def write_ao_quantity(*args):
        # Format one or more nbf x nbf matrices, five values per line.
        string = ""
        count = 0
        for quantity in args:
            for i in range(nbf):
                for j in range(nbf):
                    if not (count % 5):
                        string += "\n "
                    string += f"{quantity[i][j]:15.6E}"
                    count += 1
        return string
    NBO_file += "\n $OVERLAP"
    NBO_file += write_ao_quantity(normalized_ao_overlap)
    NBO_file += "\n $END"
    # Densities transform contravariantly, hence the inverse normalizer;
    # the Fock matrix transforms covariantly like the overlap.
    normalized_alpha_density = normalize(self.Da_subset("AO"), 1 / ao_normalizer)
    normalized_beta_density = normalize(self.Db_subset("AO"), 1 / ao_normalizer)
    normalized_alpha_fock = normalize(self.Fa_subset("AO"), ao_normalizer)
    NBO_file += "\n $DENSITY"
    if self.same_a_b_dens():
        # Restricted case: write the total (alpha + beta) density.
        density = normalized_alpha_density + normalized_beta_density
        NBO_file += write_ao_quantity(density)
    else:
        NBO_file += write_ao_quantity(normalized_alpha_density, normalized_beta_density)
    NBO_file += "\n $END"
    NBO_file += "\n $FOCK"
    if not self.same_a_b_dens():
        normalized_beta_fock = normalize(self.Fb_subset("AO"), ao_normalizer)
        NBO_file += write_ao_quantity(normalized_alpha_fock, normalized_beta_fock)
    else:
        NBO_file += write_ao_quantity(normalized_alpha_fock)
    NBO_file += "\n $END"
    # The last step is to write the MO coefficients.
    NBO_file += "\n $LCAOMO"
    def write_C_matrix(C, count):
        # The C coefficients supplied the missing multiplication by the ao_normalizer in the overlap matrix before.
        # For NBO, we need that multiplication gone.
        C = (C.np.T / ao_normalizer).T
        string = ""
        for i in range(self.nmo()):
            for mu in range(nbf):
                count += 1
                if (count % 5 == 1):
                    string += ("\n ")
                string += f"{C[mu][i]:15.6E}"
        # Pad linear dependencies
        # (NBO expects a square nbf x nbf coefficient matrix; pad with
        # zero columns when nmo < nbf)
        for i in range((nbf - self.nmo()) * nbf):
            count += 1
            if (count % 5 == 1):
                string += ("\n ")
            string += f"{0:15.6E}"
        return count, string
    count, alpha_LCAOMO = write_C_matrix(self.Ca_subset("AO", "ALL"), 0)
    NBO_file += alpha_LCAOMO
    if not self.same_a_b_orbs():
        # Unrestricted case: append the beta coefficients, continuing the
        # running count so line wrapping stays consistent.
        NBO_file += write_C_matrix(self.Cb_subset("AO", "ALL"), count)[1]
    NBO_file += "\n $END\n"
    # Now time to write!
    with open(name, 'w') as f:
        f.write(NBO_file)
# Expose the NBO writer as a method on core.Wavefunction so callers can
# simply use wfn.write_nbo(filename).
core.Wavefunction.write_nbo = _write_nbo
def _write_molden(self, filename=None, do_virtual=None, use_natural=False):
    """Function to write wavefunction information in *wfn* to *filename* in
    molden format. Will write natural orbitals from *density* (MO basis) if supplied.
    Warning! Most post-SCF Wavefunctions do not build the density as this is often
    much more costly than the energy. In addition, the Wavefunction density attributes
    (Da and Db) return the SO density and must be transformed to the MO basis
    to use with this function.

    .. versionadded:: 0.5
       *wfn* parameter passed explicitly

    :returns: None

    :type filename: string
    :param filename: destination file name for MOLDEN file (optional)

    :type do_virtual: bool
    :param do_virtual: do write all the MOs to the MOLDEN file (true) or discard the unoccupied MOs, not valid for NO's (false) (optional)

    :type use_natural: bool
    :param use_natural: write natural orbitals determined from density on wavefunction

    :examples:

    1. Molden file with the Kohn-Sham orbitals of a DFT calculation.

       >>> E, wfn = energy('b3lyp', return_wfn=True)
       >>> wfn.molden('mycalc.molden')

    2. Molden file with the natural orbitals of a CCSD computation. For correlated methods, an energy call will not compute the density.
       "properties" or "gradient" must be called.

       >>> E, wfn = properties('ccsd', return_wfn=True)
       >>> wfn.molden('ccsd_no.molden', use_natural=True)

    3. To supply a custom density matrix, manually set the Da and Db of the wavefunction.
       This is used, for example, to write natural orbitals coming from a root computed
       by a ``CIWavefunction`` computation, e.g., ``detci``, ``fci``, ``casscf``.`
       The first two arguments of ``get_opdm`` can be set to ``n, n`` where n => 0 selects the root to
       write out, provided these roots were computed, see :term:`NUM_ROOTS <NUM_ROOTS (DETCI)>`. The
       third argument controls the spin (``"A"``, ``"B"`` or ``"SUM"``) and the final
       boolean option determines whether inactive orbitals are included.

       >>> E, wfn = energy('detci', return_wfn=True)
       >>> wfn.Da() = wfn.get_opdm(0, 0, "A", True)
       >>> wfn.Db() = wfn.get_opdm(0, 0, "B", True)
       >>> molden(wfn, 'no_root1.molden', use_natural=True)
    """
    if filename is None:
        filename = core.get_writer_file_prefix(self.molecule().name()) + ".molden"
    if do_virtual is None:
        do_virtual = bool(core.get_option("SCF", "MOLDEN_WITH_VIRTUAL"))
    basisset = self.basisset()
    mol = self.molecule()
    # Header and geometry (Atom, Atom #, Z, x, y, z)
    mol_string = '[Molden Format]\n[Atoms] (AU)\n'
    for atom in range(mol.natom()):
        mol_string += f"{mol.symbol(atom):2s} {atom+1:2d} {int(mol.Z(atom)):3d} {mol.x(atom):20.10f} {mol.y(atom):20.10f} {mol.z(atom):20.10f}\n"
    # Dump basis set (one [GTO] block per atom; primitives with unit contraction scale)
    mol_string += '[GTO]\n'
    for atom in range(mol.natom()):
        mol_string += f" {atom+1:d} 0\n"
        for rel_shell_idx in range(basisset.nshell_on_center(atom)):
            abs_shell_idx = basisset.shell_on_center(atom, rel_shell_idx)
            shell = basisset.shell(abs_shell_idx)
            mol_string += f" {shell.amchar:s}{shell.nprimitive:5d} 1.00\n"
            for prim in range(shell.nprimitive):
                mol_string += f"{shell.exp(prim):20.10f} {shell.original_coef(prim):20.10f}\n"
        mol_string += '\n'
    #
    if use_natural:
        # Natural orbitals: diagonalize the MO-basis densities; occupations double
        # as the "energies" written to the [MO] section below.
        # Alphas
        nmopi = self.nmopi()
        #MO_Da = core.Matrix("MO Alpha Density Matrix", nmopi, nmopi)
        #MO_Da.transform(self.Da(), self.Ca().transpose())
        MO_Da = self.Da_subset("MO") #MO_Da.transform(self.Da(), self.Ca())
        NO_Ra = core.Matrix("NO Alpha Rotation Matrix", nmopi, nmopi)
        occupation_a = core.Vector(nmopi)
        MO_Da.diagonalize(NO_Ra, occupation_a, core.DiagonalizeOrder.Descending)
        Ca = core.doublet(self.Ca(), NO_Ra, False, False)
        epsilon_a = occupation_a
        # Betas
        #MO_Db = core.Matrix("MO Beta Density Matrix", nmopi, nmopi)
        #MO_Db.transform(self.Db(), self.Cb().transpose())
        MO_Db = self.Db_subset("MO")
        NO_Rb = core.Matrix("NO Beta Rotation Matrix", nmopi, nmopi)
        occupation_b = core.Vector(nmopi)
        MO_Db.diagonalize(NO_Rb, occupation_b, core.DiagonalizeOrder.Descending)
        Cb = core.doublet(self.Cb(), NO_Rb, False, False)
        epsilon_b = occupation_b
    else:
        # Canonical orbitals straight off the wavefunction.
        Ca = self.Ca()
        Cb = self.Cb()
        occupation_a = self.occupation_a()
        occupation_b = self.occupation_b()
        epsilon_a = self.epsilon_a()
        epsilon_b = self.epsilon_b()
    # Convert C matrices to AO MO basis. Ca_subset loses information about which symmetry an orbital originally had, which is why we can't use it.
    aotoso = self.aotoso()
    Ca_ao_mo = core.doublet(aotoso, Ca, False, False).nph
    Cb_ao_mo = core.doublet(aotoso, Cb, False, False).nph
    ao_overlap = self.mintshelper().ao_overlap().np
    # Convert from Psi4 internal normalization to the unit normalization expected by Molden
    ao_normalizer = ao_overlap.diagonal()**(-1 / 2)
    Ca_ao_mo = core.Matrix.from_array([(i.T / ao_normalizer).T for i in Ca_ao_mo])
    Cb_ao_mo = core.Matrix.from_array([(i.T / ao_normalizer).T for i in Cb_ao_mo])
    # Reorder AO x MO matrix to fit Molden conventions
    '''
    Reordering expected by Molden
    P: x, y, z
    5D: D 0, D+1, D-1, D+2, D-2
    6D: xx, yy, zz, xy, xz, yz
    7F: F 0, F+1, F-1, F+2, F-2, F+3, F-3
    10F: xxx, yyy, zzz, xyy, xxy, xxz, xzz, yzz, yyz, xyz
    9G: G 0, G+1, G-1, G+2, G-2, G+3, G-3, G+4, G-4
    15G: xxxx, yyyy, zzzz, xxxy, xxxz, yyyz, zzzx, zzzy, xxyy, xxzz, yyzz, xxyz, yyxz, zzxy
    Molden does not handle angular momenta higher than G
    '''
    molden_cartesian_order = [
        [2,0,1,0,0,0,0,0,0,0,0,0,0,0,0], # p
        [0,3,4,1,5,2,0,0,0,0,0,0,0,0,0], # d
        [0,4,5,3,9,6,1,8,7,2,0,0,0,0,0], # f
        [0,3,4,9,12,10,5,13,14,7,1,6,11,8,2] # g
    ]
    nirrep = self.nirrep()
    count = 0 # Keeps track of count for reordering
    temp_a = Ca_ao_mo.clone() # Placeholders for original AO x MO matrices
    temp_b = Cb_ao_mo.clone()
    for i in range(basisset.nshell()):
        am = basisset.shell(i).am
        # Only spherical p or Cartesian d/f/g shells need reordering for Molden.
        if (am == 1 and basisset.has_puream()) or (am > 1 and am < 5 and basisset.shell(i).is_cartesian()):
            for j in range(basisset.shell(i).nfunction):
                for h in range(nirrep):
                    for k in range(Ca_ao_mo.coldim()[h]):
                        Ca_ao_mo.set(h,count + molden_cartesian_order[am-1][j],k,temp_a.get(h,count+j,k))
                        Cb_ao_mo.set(h,count + molden_cartesian_order[am-1][j],k,temp_b.get(h,count+j,k))
        count += basisset.shell(i).nfunction
    # Dump MO information
    if basisset.has_puream():
        # Spherical harmonic basis: tell Molden via the [5D]/[7F]/[9G] flags.
        mol_string += '[5D]\n[7F]\n[9G]\n\n'
    ct = mol.point_group().char_table()
    mol_string += '[MO]\n'
    mo_dim = self.nmopi() if do_virtual else (self.doccpi() + self.soccpi())
    # Alphas. If Alphas and Betas are the same, then only Alphas with double occupation will be written (see line marked "***")
    mos = []
    for h in range(nirrep):
        for n in range(mo_dim[h]):
            mos.append((epsilon_a.get(h, n), (h, n)))
    # Sort mos based on energy
    def mosSort(element):
        return element[0]
    mos.sort(key=mosSort)
    for i in range(len(mos)):
        h, n = mos[i][1]
        mol_string += f" Sym= {ct.gamma(h).symbol():s}\n Ene= {epsilon_a.get(h, n):24.10e}\n Spin= Alpha\n"
        # *** restricted case: fold both spins into a single doubly-occupied entry
        if self.same_a_b_orbs() and self.epsilon_a() == self.epsilon_b() and self.same_a_b_dens():
            mol_string += f" Occup= {occupation_a.get(h, n) + occupation_b.get(h, n):24.10e}\n"
        else:
            mol_string += f" Occup= {occupation_a.get(h, n):24.10e}\n"
        for so in range(self.nso()):
            mol_string += f"{so+1:3d} {Ca_ao_mo.get(h, so, n):24.10e}\n"
    # Betas (written only for genuinely unrestricted orbitals/densities)
    mos = []
    if not self.same_a_b_orbs() or self.epsilon_a() != self.epsilon_b() or not self.same_a_b_dens():
        for h in range(nirrep):
            for n in range(mo_dim[h]):
                mos.append((self.epsilon_b().get(h, n), (h, n)))
        mos.sort(key=mosSort)
        for i in range(len(mos)):
            h, n = mos[i][1]
            mol_string += f" Sym= {ct.gamma(h).symbol():s}\n Ene= {epsilon_b.get(h, n):24.10e}\n Spin= Beta\n " \
                f"Occup= {occupation_b.get(h, n):24.10e}\n"
            for so in range(self.nso()):
                mol_string += f"{so+1:3d} {Cb_ao_mo.get(h, so, n):24.10e}\n"
    # Write Molden string to file
    with open(filename,'w') as fn:
        fn.write(mol_string)
# Expose the Molden writer as a method on psi4's Wavefunction class.
core.Wavefunction.write_molden = _write_molden
|
jturney/psi4
|
psi4/driver/p4util/writer.py
|
Python
|
lgpl-3.0
| 16,954
|
[
"Psi4"
] |
7e4dbfcdd2b5f2886da8fb28b83d26e2a127b558c3c371a88d43e3e920b66b0f
|
#!/usr/bin/env python
# encoding: utf-8
"""Legacy metrics script."""
import os
import tabulate
from modularodm import Q
from framework.analytics import get_basic_counters
from website import models
from website import settings
from website.app import init_app
from website.addons.osfstorage.model import OsfStorageFileNode
from website.addons.osfstorage.model import OsfStorageTrashedFileNode
def main():
    """Compute legacy OSF usage metrics and write them as a text table.

    Counts users, projects (total / public / forked / registered) and file
    download totals. Forked and registered projects whose first contributor
    is one of the site founders are excluded from the respective counts.
    The resulting table is written to ``settings.ANALYTICS_PATH/legacy.txt``.
    """
    # Fullnames of founders whose forks/registrations are excluded.
    # Bug fix: the fork filter previously used the misspelled
    # u'Jeffres R. Spies', so it never actually excluded anything.
    excluded_creators = (u'Jeffrey R. Spies', u'Brian A. Nosek')

    number_users = models.User.find().count()
    projects = models.Node.find(
        Q('category', 'eq', 'project') &
        Q('is_deleted', 'eq', False) &
        Q('is_folder', 'ne', True)
    )
    projects_forked = list(models.Node.find(
        Q('category', 'eq', 'project') &
        Q('is_deleted', 'eq', False) &
        Q('is_folder', 'ne', True) &
        Q('is_fork', 'eq', True)
    ))
    projects_registered = models.Node.find(
        Q('category', 'eq', 'project') &
        Q('is_deleted', 'eq', False) &
        Q('is_folder', 'ne', True) &
        Q('is_registration', 'eq', True)
    )

    def _creator_not_excluded(project):
        """Return True when the project's first contributor exists and is not a founder.

        Bug fix: the registered-projects loop previously read
        ``contributors[0].fullname`` BEFORE checking that contributors[0]
        was truthy, raising AttributeError for nodes with a missing creator.
        """
        creator = project.contributors[0] if project.contributors else None
        if not creator:
            return False
        return unicode(creator.fullname) not in excluded_creators

    pf = [p for p in projects_forked if _creator_not_excluded(p)]
    pr = [p for p in projects_registered if _creator_not_excluded(p)]

    number_projects = len(projects)
    number_projects_public = models.Node.find(
        Q('category', 'eq', 'project') &
        Q('is_deleted', 'eq', False) &
        Q('is_folder', 'ne', True) &
        Q('is_public', 'eq', True)
    ).count()
    number_projects_forked = len(pf)
    number_projects_registered = len(pr)

    ##############
    number_downloads_total = 0
    number_downloads_unique = 0
    # NOTE(review): contributors_per_project and contrib are built but never
    # reported below — kept for parity with the original script; verify whether
    # they can be dropped.
    contributors_per_project = []
    contrib = {}
    for project in projects:
        contributors_per_project.append(len(project.contributors))
        for person in project.contributors:
            if not person:
                continue
            if person._id not in contrib:
                contrib[person._id] = []
            for neighbor in project.contributors:
                if not neighbor:
                    continue
                if neighbor._id not in contrib[person._id]:
                    contrib[person._id].append(neighbor._id)
        addon = project.get_addon('osfstorage')
        # count downloads over both live and trashed file nodes (same logic,
        # previously duplicated in two copy-pasted loops)
        for node_class in (OsfStorageFileNode, OsfStorageTrashedFileNode):
            for filenode in node_class.find(Q('node_settings', 'eq', addon) & Q('kind', 'eq', 'file')):
                for idx, version in enumerate(filenode.versions):
                    page = ':'.join(['download', project._id, filenode._id, str(idx)])
                    unique, total = get_basic_counters(page)
                    number_downloads_total += total or 0
                    number_downloads_unique += unique or 0

    table = tabulate.tabulate(
        [
            ['number_users', number_users],
            ['number_projects', number_projects],
            ['number_projects_public', number_projects_public],
            ['number_projects_forked', number_projects_forked],
            ['number_projects_registered', number_projects_registered],
            ['number_downloads_total', number_downloads_total],
            ['number_downloads_unique', number_downloads_unique],
        ],
        headers=['label', 'value'],
    )
    with open(os.path.join(settings.ANALYTICS_PATH, 'legacy.txt'), 'w') as fp:
        fp.write(table)
# Script entry point: bring up the OSF application context, then compute metrics.
if __name__ == '__main__':
    init_app()
    main()
|
barbour-em/osf.io
|
scripts/metrics.py
|
Python
|
apache-2.0
| 4,068
|
[
"Brian"
] |
cfdc5910b0153efa8120ab45bbb59435b26dfda0cae04fe9ab0cc1ed684e88a1
|
# Copyright (C) 2013-2016 Martin Vejmelka, UC Denver
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR
# A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import absolute_import
from __future__ import print_function
from fmda.fuel_moisture_da import execute_da_step, retrieve_mesowest_observations
from fmda.fuel_moisture_model import FuelMoistureModel
from ingest.grib_file import GribFile, GribMessage
from ingest.rtma_source import RTMA
from utils import Dict, ensure_dir, utc_to_esmf, delete, force_copy, move
from vis.postprocessor import scalar_field_to_raster, scatter_to_raster
from ssh_shuttle import send_product_to_server
import netCDF4
import numpy as np
import json
import sys
import logging
import os
import os.path as osp
import glob
from datetime import datetime, timedelta
import pytz
import six
# setup environment
# NOTE: these files are read at import time; a missing/invalid JSON file
# aborts the whole script here rather than inside a function.
sys_cfg = Dict(json.load(open('etc/conf.json')))
cfg = Dict(json.load(open('etc/rtma_cycler.json')))
meso_token = json.load(open('etc/tokens.json'))['mesowest']
def write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels=None, alpha=None):
    """
    Write a raster and its colorbar to the postprocessing directory and record
    them in the manifest dictionary.

    :param mf: the manifest dictionary, updated in place under mf["1"][esmf_cycle][name]
    :param postproc_path: directory into which the PNG files are written
    :param cycle_dir: cycle directory name, used as the filename prefix
    :param esmf_cycle: the cycle time in ESMF string format (manifest key)
    :param name: the name of the visualized variable
    :param raster_png: the raster image (PNG bytes)
    :param coords: the geographic coordinates of the raster, stored in the manifest
    :param cb_png: the colorbar image (PNG bytes)
    :param levels: optional contour levels, recorded in the manifest when given
    :param alpha: optional raster opacity, recorded in the manifest when given
    """
    raster_name = cycle_dir + '-%s-raster.png' % name
    cb_name = cycle_dir + '-%s-raster-cb.png' % name
    with open(osp.join(postproc_path, raster_name), 'wb') as f:
        f.write(raster_png)
    with open(osp.join(postproc_path, cb_name), 'wb') as f:
        f.write(cb_png)
    mf["1"][esmf_cycle][name] = { 'raster' : raster_name, 'coords' : coords, 'colorbar': cb_name }
    if levels is not None:
        mf["1"][esmf_cycle][name].update({ 'levels' : levels })
    if alpha is not None:
        mf["1"][esmf_cycle][name].update({ 'alpha' : alpha })
def postprocess_cycle(cycle, region_cfg, wksp_path, bounds=None):
    """
    Build rasters from the computed fuel moisture.
    If no model was computed for this cycle, the previous cycle's rasters are
    copied forward instead.
    :param cycle: the UTC cycle time
    :param region_cfg: the region configuration
    :param wksp_path: the workspace path
    :param bounds: bounding box of the post-processing (min_lon, max_lon, min_lat, max_lat)
    :return: the postprocessing path, or None if nothing could be done
    """
    prev_cycle = cycle-timedelta(hours=1)
    post_cycle = cycle+timedelta(hours=1)  # NOTE(review): unused — presumably left over; confirm before removing
    model_path = compute_model_path(cycle, region_cfg.code, wksp_path)
    year_month = '%04d%02d' % (cycle.year, cycle.month)
    prev_year_month = '%04d%02d' % (prev_cycle.year, prev_cycle.month)
    cycle_dir = 'fmda-%s-%04d%02d%02d-%02d' % (region_cfg.code, cycle.year, cycle.month, cycle.day, cycle.hour)
    prev_cycle_dir = 'fmda-%s-%04d%02d%02d-%02d' % (region_cfg.code, prev_cycle.year, prev_cycle.month, prev_cycle.day, prev_cycle.hour)
    postproc_path = osp.join(wksp_path, year_month, cycle_dir)
    prev_postproc_path = osp.join(wksp_path, prev_year_month, prev_cycle_dir)
    manifest_name = cycle_dir + '.json'
    complete_manifest_name = 'fmda-%s.json' % region_cfg.code
    # nothing to do without a model for this cycle or postprocessing of the previous one
    if not is_cycle_computed(cycle, region_cfg, wksp_path) and not osp.exists(prev_postproc_path):
        logging.warning('CYCLER postprocessing failed for time {}'.format(str(cycle)))
        return None
    # per-variable visualization settings
    var_wisdom = {
        'dfm' : {
            'native_unit' : '-',
            'colorbar' : '-',
            'colormap' : 'jet_r',
            'scale' : [0.0, 0.4]
        },
        'lfm' : {
            'native_unit' : '-',
            'colorbar' : '-',
            'colormap' : 'jet_r',
            'scale' : [0.0, 3.0],
            'marker' : '^'
        },
        'EQUILd FM' : {
            'name' : 'Drying equilibrium FM',
            'native_unit' : '-',
            'colorbar' : 'i-',
            'colormap' : 'jet_r',
            'scale' : [0.0, 0.4]
        },
        'EQUILw FM' : {
            'name' : 'Wetting equilibrium FM',
            'native_unit' : '-',
            'colorbar' : 'i-',
            'colormap' : 'jet_r',
            'scale' : [0.0, 0.4]
        },
        'RH' : {
            'name' : 'Relative humidity',
            'native_unit' : '%',
            'colorbar' : '%',
            'colormap' : 'jet_r',
            'scale' : [0.0, 100.0]
        },
        'TD' : {
            'name' : 'Dew point temperature at 2m',
            'native_unit' : 'K',
            'colorbar' : 'F',
            'colormap' : 'jet',
            'scale' : [270.0, 320.0]
        },
        'T2' : {
            'name' : 'Temperature at 2m',
            'native_unit' : 'K',
            'colorbar' : 'F',
            'colormap' : 'jet',
            'scale' : [270.0, 320.0]
        },
        'PRECIPA' : {
            'name' : 'RTMA precipa',
            'native_unit' : 'kg/m^2/h',
            'colorbar' : 'kg/m^2/h',
            'colormap' : 'jet_r',
            'scale' : [0.0, 2.0]
        },
        'PRECIP' : {
            'name' : 'Precipitation',
            'native_unit' : 'mm/h',
            'colorbar' : 'mm/h',
            'colormap' : 'jet_r',
            'scale' : [0.0, 2.0]
        },
        'HGT' : {
            'name' : 'Terrain height',
            'native_unit' : 'm',
            'colorbar' : 'm',
            'colormap' : 'jet_r',
            'scale' : [-86.0, 4500.0]
        },
    }
    show = ['TD','PRECIPA','T2','HGT','PRECIP','RH','EQUILd FM','EQUILw FM']
    # NOTE(review): the reassignment below deliberately(?) overrides the list above,
    # so only these four scalar fields are rendered — confirm intent
    show = ['T2','HGT','PRECIP','RH']
    esmf_cycle = utc_to_esmf(cycle)
    mf = { "1" : {esmf_cycle : {}}}
    ensure_dir(osp.join(postproc_path, manifest_name))
    if not is_cycle_computed(cycle, region_cfg, wksp_path):
        # no model for this cycle: copy the previous cycle's rasters forward
        logging.info('CYCLER copying postprocessing from cycle {} to cycle {}'.format(str(prev_cycle),str(cycle)))
        prev_manifest_name = prev_cycle_dir + '.json'
        prev_esmf_cycle = utc_to_esmf(prev_cycle)
        prev_mf = json.load(open(osp.join(prev_postproc_path, prev_manifest_name), 'r'))
        for name in prev_mf['1'][prev_esmf_cycle].keys():
            prev_raster_name = prev_mf['1'][prev_esmf_cycle][name]['raster']
            prev_cb_name = prev_mf['1'][prev_esmf_cycle][name]['colorbar']
            raster_name = cycle_dir + '-%s-raster.png' % name
            cb_name = cycle_dir + '-%s-raster-cb.png' % name
            coords = prev_mf['1'][prev_esmf_cycle][name]['coords']
            alpha = prev_mf['1'][prev_esmf_cycle][name].get('alpha',None)
            force_copy(osp.join(prev_postproc_path, prev_raster_name),osp.join(postproc_path, raster_name))
            force_copy(osp.join(prev_postproc_path, prev_cb_name),osp.join(postproc_path, cb_name))
            if alpha:
                mf["1"][esmf_cycle][name] = { 'raster' : raster_name, 'coords' : coords, 'colorbar' : cb_name, 'alpha' : alpha }
            else:
                mf["1"][esmf_cycle][name] = { 'raster' : raster_name, 'coords' : coords, 'colorbar' : cb_name }
    else:
        if bounds is None:
            bounds = (region_cfg.bbox[1],region_cfg.bbox[3],region_cfg.bbox[0],region_cfg.bbox[2])
        # read in the longitudes and latitudes
        geo_path = osp.join(wksp_path, '%s-geo.nc' % region_cfg.code)
        logging.info('CYCLER reading longitudes and latitudes from NetCDF file %s' % geo_path )
        d = netCDF4.Dataset(geo_path)
        lats = d.variables['XLAT'][:,:]
        lons = d.variables['XLONG'][:,:]
        d.close()
        # read and process model variables
        with netCDF4.Dataset(model_path) as d:
            for name in show:
                raster_png, coords, cb_png, levels = scalar_field_to_raster(d.variables[name][:,:], lats, lons, var_wisdom[name])
                write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels, .5)
            # the three dead fuel moisture classes share the 'dfm' wisdom
            for i,name in [(0, '1-hr DFM'), (1, '10-hr DFM'), (2, '100-hr DFM')]:
                fm_wisdom = var_wisdom['dfm']
                fm_wisdom['name'] = 'Estimated %s' % name
                raster_png, coords, cb_png, levels = scalar_field_to_raster(d.variables['FMC_GC'][:,:,i], lats, lons, fm_wisdom)
                write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels, .5)
        # MesoWest observations (only when the optional MesoDB checkout is present)
        if osp.exists('src/ingest/MesoDB'):
            from ingest.MesoDB.mesoDB import mesoDB
            db = mesoDB('ingest/MesoDB')
            db.update['startTime'] = cycle - timedelta(hours=1)
            db.update['endTime'] = cycle + timedelta(hours=1)
            db.params['startTime'] = cycle - timedelta(hours=1)
            db.params['endTime'] = cycle + timedelta(hours=1)
            db.params['longitude1'], db.params['longitude2'], db.params['latitude1'], db.params['latitude2'] = bounds
            if is_cycle_computed(cycle, region_cfg, wksp_path):
                db.params['updateDB'] = False
            df = db.get_DB()
            st = db.sites()
            data = df.groupby('STID').mean().join(st[['LONGITUDE','LATITUDE']])
            meso_wisdom = var_wisdom['dfm']
            meso_wisdom['name'] = 'MesoWest 10-hr DFM'
            meso_wisdom['bbox'] = bounds
            meso_wisdom['text'] = False
            raster_png, coords, cb_png, levels = scatter_to_raster(np.array(data['fm10'])/100.,
                                                                   np.array(data['LATITUDE']).astype(float),
                                                                   np.array(data['LONGITUDE']).astype(float), meso_wisdom)
            name = 'MESO 10-hr DFM'
            write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels, 1.)
        # NFMDB observations
        if osp.exists('src/ingest/FMDB'):
            from ingest.FMDB.FMDB import FMDB
            from ingest.FMDB.utils import filter_outliers
            period_length = 7 # period in days
            period_num = np.ceil(cycle.day/period_length)
            db = FMDB('ingest/NFMDB')
            db.params['startYear'] = 2019
            data = db.get_data()
            data = filter_outliers(data)
            data['fuel_type'] = data['fuel_type'].fillna('None').str.upper()
            data['fuel_variation'] = data['fuel_variation'].fillna('None').str.upper()
            sts = db.sites()
            data = data.join(sts[['lng','lat']],'site_number')
            # mask space
            lats = data['lat']
            lons = data['lng']
            data = data[np.logical_and(lats <= bounds[3],
                        np.logical_and(lats >= bounds[2],
                        np.logical_and(lons <= bounds[1],
                                       lons >= bounds[0])))]
            dates = data['date'].dt.tz_localize(pytz.UTC)
            # calculate top 5 LFM to always plot the same
            top = 5
            hist_data = data[dates.dt.year <= 2020]
            hist_dfm_mask = np.array(['-HOUR' in ft for ft in np.array(hist_data['fuel_type'])]).astype(bool)
            hist_df_lfm = hist_data[~hist_dfm_mask].reset_index(drop=True)
            fts = np.array(hist_df_lfm[['fuel_type','percent']].groupby('fuel_type').count().sort_values(by='percent',ascending=False).index[:top])
            # mask time
            start = cycle.replace(day=int(period_length*(period_num-1)+1),hour=0,minute=0,second=0,microsecond=0)
            end = cycle
            data = data[np.logical_and(dates >= start, dates <= end)]
            # rasters below are tagged with the start of the current period, not the cycle
            cycle_dir = 'fmda-%s-%04d%02d%02d-%02d' % (region_cfg.code, start.year, start.month, start.day, start.hour)
            # mask dead and live fuel moisture
            dfm_mask = np.array(['-HOUR' in ft for ft in np.array(data['fuel_type'])]).astype(bool)
            df_dfm = data[dfm_mask].reset_index(drop=True)
            df_lfm = data[~dfm_mask].reset_index(drop=True)
            # plot NFMDB dead fuel moisture
            for i,name in [('1-HOUR','NFMDB 1-hr DFM'),('10-HOUR','NFMDB 10-hr DFM'),('100-HOUR','NFMDB 100-hr DFM'),('1000-HOUR','NFMDB 1000-hr DFM')]:
                fmdb_wisdom = var_wisdom['dfm']
                fmdb_wisdom['name'] = name
                fmdb_wisdom['bbox'] = bounds
                fmdb_wisdom['text'] = True
                fmdb_wisdom['size'] = 40
                fmdb_wisdom['linewidth'] = 1.
                data = df_dfm[df_dfm['fuel_type'] == i]
                raster_png, coords, cb_png, levels = scatter_to_raster(np.array(data['percent'])/100.,
                                                                       np.array(data['lat']),
                                                                       np.array(data['lng']), fmdb_wisdom)
                write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels, 1.)
            # plot NFMDB live fuel moisture
            df_lfm = df_lfm.sort_values('date').groupby(['site_number','fuel_type']).last().reset_index()
            for ft in fts:
                name = 'NFMDB {} LFM'.format(ft)
                fmdb_wisdom = var_wisdom['lfm']
                fmdb_wisdom['name'] = name
                fmdb_wisdom['bbox'] = bounds
                fmdb_wisdom['text'] = True
                fmdb_wisdom['size'] = 40
                fmdb_wisdom['linewidth'] = 1.
                data = df_lfm[df_lfm['fuel_type'] == ft]
                raster_png, coords, cb_png, levels = scatter_to_raster(np.array(data['percent'])/100.,
                                                                       np.array(data['lat']),
                                                                       np.array(data['lng']), fmdb_wisdom)
                write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels, 1.)
            # all remaining (non-top) live fuel types averaged per site
            name = 'NFMDB OTHERS LFM'
            fmdb_wisdom = var_wisdom['lfm']
            fmdb_wisdom['name'] = name
            fmdb_wisdom['bbox'] = bounds
            fmdb_wisdom['text'] = True
            fmdb_wisdom['size'] = 40
            fmdb_wisdom['linewidth'] = 1.
            data = df_lfm[~df_lfm['fuel_type'].isin(fts)]
            data = data.groupby('site_number').mean()
            raster_png, coords, cb_png, levels = scatter_to_raster(np.array(data['percent'])/100.,
                                                                   np.array(data['lat']),
                                                                   np.array(data['lng']), fmdb_wisdom)
            write_postprocess(mf, postproc_path, cycle_dir, esmf_cycle, name, raster_png, coords, cb_png, levels, 1.)
    logging.info('writing manifest file %s' % osp.join(postproc_path, manifest_name) )
    json.dump(mf, open(osp.join(postproc_path, manifest_name), 'w'), indent=1, separators=(',',':'))
    logging.info(json.dumps(mf))
    # fold this cycle's entries into the region-wide cumulative manifest
    if osp.exists(osp.join(prev_postproc_path, complete_manifest_name)):
        complete_mf = json.load(open(osp.join(prev_postproc_path, complete_manifest_name), 'r'))
        complete_mf['1'].update(mf['1'])
        json.dump(complete_mf, open(osp.join(postproc_path, complete_manifest_name), 'w'), indent=1, separators=(',',':'))
    else:
        json.dump(mf, open(osp.join(postproc_path, complete_manifest_name), 'w'), indent=1, separators=(',',':'))
    return postproc_path
def compute_model_path(cycle, region_code, wksp_path, ext='nc'):
    """
    Construct a relative path to the fuel moisture model file
    for the region code and cycle.

    :param cycle: the UTC cycle time
    :param region_code: the code of the region
    :param wksp_path: the workspace path
    :param ext: the file extension (default 'nc')
    :return: a relative path (w.r.t. workspace and region) of the fuel model file
    """
    month_dir = '%04d%02d' % (cycle.year, cycle.month)
    stamp = '%04d%02d%02d-%02d' % (cycle.year, cycle.month, cycle.day, cycle.hour)
    model_file = 'fmda-%s-%s.%s' % (region_code, stamp, ext)
    return osp.join(wksp_path, region_code, month_dir, model_file)
def find_region_indices(glat,glon,minlat,maxlat,minlon,maxlon):
    """
    Find the indices i1:i2 (lat dimension) and j1:j2 (lon dimension)
    that contain the desired region (minlat-maxlat,minlon-maxlon).
    Bounds are shrunk iteratively until they stabilize, since on a
    curvilinear grid the row/column extremes depend on the current window.

    :param glat: the grid latitudes
    :param glon: the grid longitudes
    :param minlat: the minimum latitude
    :param maxlat: the maximum latitude
    :param minlon: the minimum longitude
    :param maxlon: the maximum longitude
    :return: dim 0 min/max indices and dim1 min/max indices
    """
    i1, i2 = 0, glat.shape[0]
    j1, j2 = 0, glat.shape[1]
    shrinking = True
    while shrinking:
        shrinking = False
        # raise the lower row bound past rows lying entirely south of the region
        rows = np.where(np.amax(glat[:, j1:j2], axis=1) < minlat)[0]
        cand = rows[-1] if len(rows) else i1
        if cand != i1:
            i1, shrinking = cand, True
        # lower the upper row bound past rows lying entirely north of the region
        rows = np.where(np.amin(glat[:, j1:j2], axis=1) > maxlat)[0]
        cand = rows[0] if len(rows) else i2
        if cand != i2:
            i2, shrinking = cand, True
        # raise the lower column bound past columns entirely west of the region
        cols = np.where(np.amax(glon[i1:i2, :], axis=0) < minlon)[0]
        cand = cols[-1] if len(cols) else j1
        if cand != j1:
            j1, shrinking = cand, True
        # lower the upper column bound past columns entirely east of the region
        cols = np.where(np.amin(glon[i1:i2, :], axis=0) > maxlon)[0]
        cand = cols[0] if len(cols) else j2
        if cand != j2:
            j2, shrinking = cand, True
    return i1, i2, j1, j2
def compute_rtma_bounds(bbox):
    """
    Compute bounds from RTMA data even when RTMA data is not available from terrain static data.

    :param bbox: the bounding box of the data (minlat, minlon, maxlat, maxlon)
    :return: a tuple containing bound coordinates (min_lon, max_lon, min_lat, max_lat)
    """
    # the static terrain file carries the full RTMA grid geometry
    grid_lats, grid_lons = GribFile('static/ds.terrainh.bin')[1].latlons()
    i1, i2, j1, j2 = find_region_indices(grid_lats, grid_lons, bbox[0], bbox[2], bbox[1], bbox[3])
    sub_lats = grid_lats[i1:i2, j1:j2]
    sub_lons = grid_lons[i1:i2, j1:j2]
    return (sub_lons.min(), sub_lons.max(), sub_lats.min(), sub_lats.max())
def load_rtma_data(rtma_data, bbox):
    """
    Load relevant RTMA fields restricted to a bounding box and return them.

    :param rtma_data: a dictionary mapping variable names to local paths
    :param bbox: the bounding box of the data (minlat, minlon, maxlat, maxlon)
    :return: a tuple (td, t2, rh, precipa, hgt, lats, lons)
    """
    gf = GribFile(rtma_data['temp'])[1]
    lats, lons = gf.latlons()
    # bbox format: minlat, minlon, maxlat, maxlon
    i1, i2, j1, j2 = find_region_indices(lats, lons, bbox[0], bbox[2], bbox[1], bbox[3])
    t2 = np.ma.array(gf.values())[i1:i2,j1:j2] # temperature at 2m in K
    td = np.ma.array(GribFile(rtma_data['td'])[1].values())[i1:i2,j1:j2] # dew point in K
    precipa = np.ma.array(GribFile(rtma_data['precipa'])[1].values())[i1:i2,j1:j2] # precipitation
    hgt = np.ma.array(GribFile('static/ds.terrainh.bin')[1].values())[i1:i2,j1:j2] # terrain height in m
    logging.info('t2 min %s max %s' % (np.min(t2),np.max(t2)))
    logging.info('td min %s max %s' % (np.min(td),np.max(td)))
    logging.info('precipa min %s max %s' % (np.min(precipa),np.max(precipa)))
    logging.info('hgt min %s max %s' % (np.min(hgt),np.max(hgt)))
    # compute relative humidity via the Magnus approximation
    # RH = 100*exp(b*c*(Td - T)/((c + T_C)*(c + Td_C))), b = 17.625, c = 243.04 degC
    # bug fix: the dew-point denominator previously used 243.0 instead of the
    # Magnus constant 243.04 used in the other two places of this expression
    rh = 100*np.exp(17.625*243.04*(td - t2) / (243.04 + t2 - 273.15) / (243.04 + td - 273.15))
    return td, t2, rh, precipa, hgt, lats[i1:i2,j1:j2], lons[i1:i2,j1:j2]
def compute_equilibria(T, H):
    """
    Compute the drying and wetting equilibrium given temperature and relative humidity.

    :param T: the temperature at 2 meters in K
    :param H: the relative humidity in percent
    :return: a tuple containing the drying and wetting equilibrium
    """
    # temperature/humidity correction term shared by both equilibria (T in K)
    correction = 0.18*(21.1 + 273.15 - T)*(1 - np.exp(-0.115*H))
    drying = 0.924*H**0.679 + 0.000499*np.exp(0.1*H) + correction
    wetting = 0.618*H**0.753 + 0.000454*np.exp(0.1*H) + correction
    # convert from percent to fractional moisture content
    drying *= 0.01
    wetting *= 0.01
    return drying, wetting
def fmda_advance_region(cycle, cfg, rtma, wksp_path, lookback_length, meso_token):
    """
    Advance the fuel moisture estimates in the region specified by the configuration.
    The function assumes that the fuel moisture model has not been advanced to this
    cycle yet and will overwrite any previous computations.
    Control flow:
    1) read in RTMA variables
    2) check if there is a stored FM model for previous cycle
    2a) yes -> load it, advance one time-step, perform DA
    2b) no -> compute equilibrium, use background covariance to do DA
    3) store model
    :param cycle: the datetime indicating the processed cycle in UTC
    :param cfg: the configuration dictionary specifying the region
    :param rtma: the RTMA object that can be used to retrieve variables for this cycle
    :param wksp_path: the workspace path for the cycler
    :param lookback_length: number of cycles to search before we find a computed cycle
    :param meso_token: the mesowest API access token or a list of them
    :return: the model advanced and assimilated at the current cycle
    """
    logging.info("rtma_cycler.fmda_advance_region: %s" % str(cycle))
    model = None
    prev_cycle = cycle - timedelta(hours=1)
    prev_model_path = compute_model_path(prev_cycle, cfg.code, wksp_path)
    if not osp.exists(prev_model_path):
        logging.info('CYCLER cannot find model from previous cycle %s' % str(prev_cycle))
        if lookback_length > 0:
            # recurse backwards until a computed cycle is found (or lookback is exhausted)
            model = fmda_advance_region(cycle - timedelta(hours=1), cfg, rtma, wksp_path, lookback_length - 1, meso_token)
    else:
        logging.info('CYCLER found previous model for cycle %s.' % str(prev_cycle))
        model = FuelMoistureModel.from_netcdf(prev_model_path)
    # retrieve the variables and make sure they are available (we should not be here if they are not)
    try:
        dont_have_vars, have_vars = rtma.retrieve_rtma(cycle)
    except ValueError as e:
        logging.error(e)
        sys.exit(1)
    assert not dont_have_vars
    logging.info('CYCLER loading RTMA data for cycle %s.' % str(cycle))
    TD, T2, RH, precipa, hgt, lats, lons = load_rtma_data(have_vars, cfg.bbox)
    Ed, Ew = compute_equilibria(T2, RH)
    # `+ 0` forces a copy so the clamping below does not modify precipa
    rain = precipa[:,:] + 0
    # remove rain that is too small to make any difference
    rain[rain < 0.01] = 0
    # remove bogus rain that is too large
    rain[rain > 1e10] = 0
    dom_shape = T2.shape
    # store the lons/lats for this domain
    geo_path = osp.join(wksp_path, '%s-geo.nc' % cfg.code)
    if not osp.isfile(geo_path):
        logging.info('CYCLER initializing new file %s.' % (geo_path))
        d = netCDF4.Dataset(geo_path, 'w', format='NETCDF4')
        d.createDimension('south_north', dom_shape[0])
        d.createDimension('west_east', dom_shape[1])
        xlat = d.createVariable('XLAT', 'f4', ('south_north', 'west_east'))
        xlat[:,:] = lats
        xlong = d.createVariable('XLONG', 'f4', ('south_north', 'west_east'))
        xlong[:,:] = lons
        d.close()
    else:
        logging.info('CYCLER file already exists: %s.' % (geo_path))
    # the process noise matrix
    Q = np.diag([1e-4,5e-5,1e-5,1e-6,1e-6])
    # background covariance
    P0 = np.diag([0.01,0.01,0.01,0.001,0.001])
    # check if we must start from equilibrium
    if model is None:
        logging.info('CYCLER initializing from equilibrium for cycle %s.' % (str(cycle)))
        # setup model parameters: 3 fuel classes with 1/10/100-hr lag times
        Nk = 3
        Tk = np.array([1.0, 10.0, 100.0])
        m0 = np.expand_dims(0.5 * (Ed + Ew), axis=2)
        model = FuelMoistureModel(m0[:,:,[0,0,0]], Tk, P0)
    else:
        logging.info('CYCLER advancing model one hour to cycle %s.' % (str(cycle)))
        dt = 3600 # always 1 hr step in RTMA
        model.advance_model(Ed, Ew, rain, dt, Q)
    logging.info('CYCLER retrieving fm-10 observations for cycle %s.' % (str(cycle)))
    # perform assimilation with mesowest observations (+/- 30 min around the cycle)
    tm_start = cycle - timedelta(minutes=30)
    tm_end = cycle + timedelta(minutes=30)
    fm10 = retrieve_mesowest_observations(meso_token, tm_start, tm_end, lats, lons, hgt)
    fm10v = []
    for fm10_obs in fm10.values():
        for obs in fm10_obs:
            fm10v.append(obs.get_value())
    logging.info('CYCLER retrieved %d valid observations, min/mean/max [%g/%g/%g].' %
                 (len(fm10),np.amin(fm10v),np.mean(fm10v),np.amax(fm10v)))
    # run the data assimilation step
    covs = [np.ones(dom_shape), hgt / 2000.0]
    covs_names = ['const','hgt/2000']
    if np.any(rain > 0.01):
        covs.append(rain)
        covs_names.append('rain')
    execute_da_step(model, cycle, covs, covs_names, fm10)
    # make geogrid files for WPS; datasets and lines to add to GEOGRID.TBL
    geo_path = compute_model_path(cycle, cfg.code, wksp_path,ext="geo")
    index = rtma.geogrid_index()
    print('index',index)
    model.to_geogrid(geo_path,index,lats,lons)
    # make wps format files for WPS
    fmda_path = osp.join(wksp_path,cfg.code,'{:04d}{:02d}'.format(cycle.year,cycle.month))
    time_tag = '{:04d}-{:02d}-{:02d}_{:02d}'.format(cycle.year, cycle.month, cycle.day, cycle.hour)
    model.to_wps_format(fmda_path,index,lats,lons,time_tag)
    # store the new model
    model_path = compute_model_path(cycle, cfg.code, wksp_path)
    logging.info('CYCLER writing model variables to:  %s.' % model_path)
    model.to_netcdf(ensure_dir(model_path),
                    {'EQUILd FM':Ed,'EQUILw FM':Ew,'TD':TD,'T2':T2,'RH':RH,'PRECIPA':precipa,'PRECIP':rain,'HGT':hgt})
    # create visualization and send results
    bounds = (lons.min(), lons.max(), lats.min(), lats.max())
    pp_path = postprocess_cycle(cycle, cfg, wksp_path, bounds)
    if pp_path != None:
        if 'shuttle_remote_host' in sys_cfg:
            sim_code = 'fmda-' + cfg.code
            send_product_to_server(sys_cfg, pp_path, sim_code, sim_code, sim_code + '.json', cfg.region_id + ' FM')
    return model
def is_cycle_computed(cycle, cfg, wksp_path):
    """
    Determine whether the fuel model for <cycle> has already been computed
    for the region configuration <cfg> by testing for its output file.
    :param cycle: the cycle datetime in UTC
    :param cfg: the region configuration wrapped in a Dict for convenience
    :param wksp_path: the workspace path for the cycler
    :return: True if the model file has been found, False otherwise
    """
    return osp.isfile(compute_model_path(cycle, cfg.code, wksp_path))
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
    # Command-line handling (per the usage text below): a single argument of any
    # value runs the domains configured in etc/rtma_cycler.json; four arguments
    # define a one-off bounding box for an ad-hoc domain named 'FIRE'.
    if len(sys.argv) == 2:
        pass
    elif len(sys.argv) == 5:
        code = 'FIRE'
        # replace the configured regions with the single custom bounding box
        cfg.regions = {
            "Fire domain" : {
                "code" : code,
                "bbox" : sys.argv[1:5]
            }
        }
        # best-effort removal of stale outputs from a previous FIRE run
        try:
            os.remove(osp.join(cfg.workspace_path,code+'-geo.nc'))
        except Exception as e:
            logging.warning(e)
        try:
            delete(osp.join(cfg.workspace_path,code))
        except Exception as e:
            logging.warning(e)
    else:
        print('Usage: to use domains configured in etc/rtma_cycler.json:')
        print('./rtma_cycler.sh anything')
        print('To use a custom domain named FIRE by giving a bounding box:')
        print('./rtma_cycler.sh lat1 lon1 lat2 lon2')
        print('Example: ./rtma_cycler.sh 42, -124.6, 49, -116.4')
        exit(1)
    logging.info('regions: %s' % json.dumps(cfg.regions))
    #logging.info('regions: %s' % json.dumps(cfg.regions, indent=1, separators=(',',':')))
    # current time; cycles are on the hour, so round down after a 50-minute grace period
    now = datetime.now(pytz.UTC)
    cycle = (now - timedelta(minutes=50)).replace(minute=0,second=0,microsecond=0)
    logging.info('CYCLER activated at %s, will attempt cycle at %s' % (str(now), str(cycle)))
    # what is the most recent RTMA data available? walk back one hour at a time
    # up to cfg.lookback_length hours until all required variables are present
    lookback_length = cfg.lookback_length
    dont_have_vars, have_vars = None, None
    rtma = RTMA('ingest', ['precipa', 'wspd', 'wdir', 'td', 'temp'])
    while lookback_length > 0:
        dont_have_vars, have_vars = rtma.retrieve_rtma(cycle)
        if dont_have_vars:
            logging.info('RTMA variables %s not yet available for cycle %s.' % (str(dont_have_vars), str(cycle)))
            cycle -= timedelta(hours=1)
            lookback_length -= 1
        else:
            break
    if dont_have_vars:
        # no usable RTMA cycle found: republish previous post-processing per
        # region so downstream consumers still get output, then exit non-zero
        logging.warning('CYCLER could not find useable cycle.')
        logging.warning('CYCLER copying previous post-processing.')
        for region_id,region_cfg in six.iteritems(cfg.regions):
            wrapped_cfg = Dict(region_cfg)
            wrapped_cfg.update({'region_id': region_id})
            try:
                bounds = compute_rtma_bounds(wrapped_cfg.bbox)
                pp_path = postprocess_cycle(cycle, wrapped_cfg, cfg.workspace_path, bounds)
                if pp_path != None:
                    if 'shuttle_remote_host' in sys_cfg:
                        sim_code = 'fmda-' + wrapped_cfg.code
                        send_product_to_server(sys_cfg, pp_path, sim_code, sim_code, sim_code + '.json', region_id + ' FM')
            except Exception as e:
                logging.warning('CYCLER exception {}'.format(e))
                logging.error('CYCLER skipping region {} for cycle {}'.format(region_id,str(cycle)))
        sys.exit(1)
    logging.info('Have RTMA data for cycle %s.' % str(cycle))
    # check for each region, if we are up to date w.r.t. RTMA data available
    for region_id,region_cfg in six.iteritems(cfg.regions):
        wrapped_cfg = Dict(region_cfg)
        wrapped_cfg.update({'region_id': region_id})
        #if 1: # to run every time for debugging
        if not is_cycle_computed(cycle, wrapped_cfg, cfg.workspace_path):
            logging.info('CYCLER processing region %s for cycle %s' % (region_id, str(cycle)))
            try:
                # NOTE(review): meso_token is presumably a MesoWest API token
                # loaded at module level — not visible in this chunk, confirm.
                fmda_advance_region(cycle, wrapped_cfg, rtma, cfg.workspace_path, lookback_length, meso_token)
            except Exception as e:
                # on failure, fall back to republishing the previous post-processing
                logging.warning('CYCLER failed processing region {} for cycle {}'.format(region_id,str(cycle)))
                logging.warning('CYCLER exception {}'.format(e))
                logging.warning('CYCLER copying previous post-processing or re-trying.')
                try:
                    bounds = compute_rtma_bounds(wrapped_cfg.bbox)
                    pp_path = postprocess_cycle(cycle, wrapped_cfg, cfg.workspace_path, bounds)
                    if pp_path != None:
                        if 'shuttle_remote_host' in sys_cfg:
                            sim_code = 'fmda-' + wrapped_cfg.code
                            send_product_to_server(sys_cfg, pp_path, sim_code, sim_code, sim_code + '.json', region_id + ' FM')
                except Exception as e:
                    logging.warning('CYCLER exception {}'.format(e))
                    logging.error('CYCLER skipping region {} for cycle {}'.format(region_id,str(cycle)))
        else:
            logging.info('CYCLER the cycle %s for region %s is already complete, skipping ...' % (str(cycle), str(region_id)))
    # done
    logging.info('CYCLER cycle %s complete.' % str(cycle))
|
openwfm/wrfxpy
|
src/rtma_cycler.py
|
Python
|
mit
| 32,003
|
[
"NetCDF"
] |
9214e2804d313ac3289428a9ccf18fa05633a1eb3414b2fb2e8deabf0847fad8
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2022, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
"""
QE Type Conversions
######################
"""
# Map of QE unit/coordinate keywords (as they appear in QE input files,
# possibly parenthesized) to the unit labels used by this package.
lengths = {'alat': 'alat', 'bohr': 'au', 'crystal': 'crystal',
           'angstrom': 'A', 'crystal_sg': 'crystal_sg'}
def to_qe_type(value):
    """
    Convert a Python object to the (string) representation to be read in by QE.

    Args:
        value: Python object to be converted (str, bool, int, or float)

    Returns:
        conv_obj (str): String representation of the converted Python object

    Raises:
        Exception: If the value's type has no known QE representation.
    """
    if isinstance(value, str):
        return value
    elif isinstance(value, bool):
        # bool must be tested before int: isinstance(True, int) is True,
        # and QE expects Fortran-style logicals.
        return '.true.' if value else '.false.'
    elif isinstance(value, (int, float)):
        return str(value)
    else:
        raise Exception('Unknown type {0} [{1}].'.format(type(value), value))
def to_py_type(value):
    """
    Convert a QE string value to a standard Python object.

    Args:
        value (str): QE string value (e.g. "10", "1.5d-2", ".true.", "foo,")

    Returns:
        conv_obj: Python typed object (bool, int, float, or str)

    Note:
        The previous implementation tested the parsed numbers for
        truthiness ("elif is_int:"), so "0" and "0.0" were incorrectly
        returned as strings; parsing success is now signalled by the
        absence of a ValueError instead.
    """
    value = value.strip()
    # values in QE namelists may carry a trailing comma separator
    value = value.replace(',', '')
    # Fortran-style logicals
    if value == '.true.':
        return True
    if value == '.false.':
        return False
    # try int before float so "10" stays an int rather than becoming 10.0
    try:
        return int(value)
    except ValueError:
        pass
    try:
        # Fortran uses 'd' as the exponent marker (e.g. 1.5d-2)
        return float(value.replace('d', 'e'))
    except ValueError:
        pass
    # anything unparseable stays a plain string
    return value
def get_length(value):
    """
    Look up the unit label for a QE length/coordinate keyword,
    ignoring any surrounding parentheses (e.g. "(bohr)" -> "au").
    """
    keyword = value.replace(')', '').replace('(', '')
    return lengths[keyword]
|
exa-analytics/exatomic
|
exatomic/qe/types.py
|
Python
|
apache-2.0
| 1,774
|
[
"CRYSTAL"
] |
fae1193c8734c2ba5dbc2251429fc6c48804c323cbd0f9efade0710ab5cef289
|
# The MIT License (MIT)
#
# Copyright (c) 2015 Christofer Hedbrandh
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
__author__ = 'Christofer Hedbrandh (chedbrandh@gmail.com)'
__copyright__ = 'Copyright (c) 2015 Christofer Hedbrandh'
import itertools
import random
import sys
# for python 2 and python 3 compatibility
if sys.version_info < (3,):
range = xrange
class GraphPathFinder(object):
    """Class for finding a path between two sets of vertices using an edge getter function.
    By providing a DirectedEdgeGetter instead of a whole (potentially very large)
    graph, memory can be saved while still being able to find paths between two
    sets of vertices.
    A GraphPathFinder is created on a per number-of-vertices basis. In order to
    find paths of different lengths, multiple GraphPathFinders must be created.
    Since the number of vertices are known, the distance from the root vertex is
    here referred to as a step. I.e. all start_vertices are at step 0 and all
    neighboring vertices (at distance 1) are at step 1.
    In the build phase the set of possible vertices at each step is populated.
    Note that a vertex with an edge to a vertex in a previous step might not
    have an edge to a vertex in the next step. This introduces the concept of
    "reachable" vertices. A reachable vertex at some step, is (indirectly)
    connected both to a start and an end vertex.
    The GraphPathFinder class does not have a method for providing an iterator of
    all paths in a random order. In order to provide such functionality all
    possible graphs must be kept in memory. This feature could however be created
    given the other methods provided here (presumably when the max number of
    possible graphs is known and not too great).
    """
    def __init__(self, start_vertices, end_vertices, num_vertices, directed_edge_getter):
        """Create a GraphPathFinder given start and end vertices.
        Args:
            start_vertices:
                All found paths start with a vertex in start_vertices.
            end_vertices:
                All found paths end with a vertex in end_vertices.
            num_vertices:
                All found paths consist of exactly num_vertices number of vertices.
            directed_edge_getter:
                DirectedEdgeGetter used to find vertices connected to start vertices
                via out edges, and to end vertices via in edges. It is also used to
                find those vertices neighbors and the neighbors' neighbors and so on.
                Note the importance of the required DirectedEdgeGetter property of an
                edge getting listed in both direction. E.g. for all vertices Y listed
                in get_end_vertices(X), X is listed in get_start_vertices(Y).
        Raises:
            ValueError: if either vertex set is empty or num_vertices < 2.
        """
        if len(start_vertices) < 1:
            raise ValueError("Set of start vertices must be non empty.")
        if len(end_vertices) < 1:
            raise ValueError("Set of end vertices must be non empty.")
        if num_vertices < 2:
            raise ValueError("Number of connected vertices must be greater than one.")
        self._start_vertices = start_vertices
        self._end_vertices = end_vertices
        self._num_vertices = num_vertices
        self._dedge = directed_edge_getter
        # list of reachable vertex sets
        # i.e. step 0 contains all start vertices and step -1 contains all end vertices
        # step 1 contains all vertices with an edge from both step 0 and 2
        self._step_sets = []
        # true if there is no path from start to end vertices
        self._is_disconnected = False
        # build step sets and determine if disconnected
        self._build_step_sets()
    def is_disconnected(self):
        """If no path exists between the start and end vertices, the graph is disconnected.
        Returns:
            True if the graph is disconnected and False otherwise.
        """
        return self._is_disconnected
    def get_random_path(self):
        """Get a random path from a start vertex to an end vertex.
        The random path is created by randomly selecting a vertex in the set of all
        connected vertices. One random neighbor is then selected in each direction.
        Then a neighbor's neighbor is randomly selected, and so on until the set of
        start and the set of end vertices has been reached.
        Returns:
            A tuple of vertices of length num_vertices, where the first vertex is in
            the set of start vertices and the last vertex is in the set of end
            vertices, and all vertices in between are connected according to the
            directed_edge_getter.
        Raises:
            ValueError: if no path exists between start and end vertices.
        """
        if self.is_disconnected():
            raise ValueError("Start and end vertices are disconnected.")
        # pick a random step set and a random vertex in that set to start with
        path = [None] * self._num_vertices
        start_step_index = random.randint(0, self._num_vertices - 1)
        start_step_set = self._step_sets[start_step_index]
        path[start_step_index] = random.choice(list(start_step_set))
        # fill earlier steps (walking backwards via in-edges; the step sets only
        # contain reachable vertices, so the intersection is never empty)
        for i in reversed(range(1, start_step_index + 1)):
            vertices = self._dedge.get_start_vertices(path[i]).intersection(self._step_sets[i - 1])
            path[i - 1] = random.choice(list(vertices))
        # fill later steps (walking forwards via out-edges)
        for i in range(start_step_index, self._num_vertices - 1):
            vertices = self._dedge.get_end_vertices(path[i]).intersection(self._step_sets[i + 1])
            path[i + 1] = random.choice(list(vertices))
        return tuple(path)
    def get_all_paths(self):
        """Get an iterator of all possible paths from a start vertex to an end vertex.
        Returns:
            An iterator of tuples of vertices of length num_vertices, where the first
            vertex is in the set of start vertices and the last vertex is in the set
            of end vertices, and all vertices in between are connected according to
            the directed_edge_getter.
        Raises:
            ValueError: if no path exists between start and end vertices.
        """
        if self.is_disconnected():
            raise ValueError("Start and end vertices are disconnected.")
        return self._get_all_paths_generator()
    def _get_all_paths_generator(self):
        """Perform a depth first search and yield every time a leaf vertex is visited."""
        # current path is stored in path
        path = [None] * self._num_vertices
        # currently visiting vertex step_index number of steps from root
        step_index = 0
        # all unvisited sibling vertices are stored at each step from root
        steps_vertices = [None] * self._num_vertices
        # start by populating step 0 with all reachable start vertices
        steps_vertices[step_index] = list(self._step_sets[0])
        # while there are still unvisited paths (step_index == -1 means the
        # root's alternatives are exhausted and the search is complete)
        while step_index > -1:
            # get all remaining unvisited vertices at current step
            vertices = steps_vertices[step_index]
            if len(vertices) == 0:
                # if all vertices have been visited at the current step, then step back
                step_index -= 1
            else:
                # mark the visit of a vertex at the current step by removing it
                vertex = vertices.pop()
                # update the current path with the vertex
                path[step_index] = vertex
                # yield path if visiting a leaf vertex
                if step_index == self._num_vertices - 1:
                    yield tuple(path)
                # else populate next steps_vertices with all reachable vertices connected to vertex
                else:
                    steps_vertices[step_index + 1] = set(self._dedge.get_end_vertices(vertex)).\
                        intersection(self._step_sets[step_index + 1])
                    step_index += 1
    def _build_step_sets(self):
        """Build the list of reachable vertex sets.
        Step 0 starts out with the set of all start vertices, and step -1 starts
        out with the set of all end vertices. All the vertices of step 1 that are
        reachable from step 0 can be calculated with the directed_edge_getter. This
        does not mean that these vertices are reachable from the other direction
        however. Essentially what is done here is building one tree from each
        direction; one tree with a root at the start, and one at the end. When they
        meet somewhere in the middle, the vertices that don't have an edge in both
        directions are removed.
        If there is no path between the start and the end vertices, this is
        discovered during this process also.
        """
        # build step sets
        self._step_sets = [set() for i in range(self._num_vertices)]
        self._step_sets[0] = set(self._start_vertices).copy()
        self._step_sets[-1] = set(self._end_vertices).copy()
        # build intermediate steps, growing the two frontiers toward each other
        earlier_index = 0
        later_index = self._num_vertices - 1
        while earlier_index < later_index - 1:
            earlier_set = self._step_sets[earlier_index]
            later_set = self._step_sets[later_index]
            # the smallest set takes the next step
            if len(earlier_set) < len(later_set):
                new_set = _expand(earlier_set, self._dedge.get_end_vertices)
                self._step_sets[earlier_index + 1] = new_set
                earlier_index += 1
            else:
                new_set = _expand(later_set, self._dedge.get_start_vertices)
                self._step_sets[later_index - 1] = new_set
                later_index -= 1
            # if no new vertices then the graph is disconnected
            if len(new_set) == 0:
                self._is_disconnected = True
                return
        # filter later intermediate steps (drop vertices not reachable forwards)
        for i in range(earlier_index, self._num_vertices - 1):
            expand_update(self._step_sets[i + 1], self._step_sets[i], self._dedge.get_end_vertices)
            if len(self._step_sets[i + 1]) == 0:
                self._is_disconnected = True
                return
        # filter early intermediate steps (drop vertices not reachable backwards)
        for i in reversed(range(1, later_index + 1)):
            expand_update(self._step_sets[i - 1], self._step_sets[i], self._dedge.get_start_vertices)
            if len(self._step_sets[i - 1]) == 0:
                self._is_disconnected = True
                return
def expand_update(the_set, other_set, expand_fn):
    """Intersect the_set, in place, with the expansion of other_set.
    I.e.
    Updates the_set with the intersection of _expand(other_set, expand_fn).
    E.g.
    With expand_fn = lambda x: (10*x, 100*x), the_set = set([1, 20, 300]),
    and other_set = set([1, 2, 3])
    this function leaves the_set with set([20, 300])
    Args:
        the_set: A set of elements.
        other_set: Another set of elements.
        expand_fn: Function returning an iterable given some element in other_set.
    Returns: The filtered the_set
    """
    expanded = _expand(other_set, expand_fn)
    # &= mutates the_set in place (set.__iand__), same as intersection_update
    the_set &= expanded
    return the_set
def _expand(the_set, expand_fn):
"""Returns a concatenation of the expanded sets.
I.e.
Returns a set of all elements returned by the expand_fn function for all
elements in the_set.
E.g.
With expand_fn = lambda x: (10*x, 100*x) and the_set = set([1, 2, 3])
this function returns set([10, 100, 20, 200, 30, 300])
Args:
the_set: A set of elements.
expand_fn: Function returning an interable given some element in the_set.
Returns: a concatenation of the expanded sets.
"""
return set(itertools.chain(*[expand_fn(x) for x in the_set]))
|
chedbrandh/glabra
|
glabra/graph.py
|
Python
|
mit
| 11,799
|
[
"VisIt"
] |
273628069a3c8b62073401b80d34d6d29dab48a6f65ee223b34a8fbf943367b8
|
# pylint: skip-file
import sys, os
import argparse
import mxnet as mx
import numpy as np
import logging
import seg_carv_7_init_from_cls
from symbols.irnext_v2_deeplab_v3_dcn_w_hypers import *
from symbols.unet_dcn_w_hypers import *
from seg_carv_1_data_loader import FileIter
from seg_carv_1_data_loader import BatchFileIter
from seg_carv_2_dicemetric import DiceMetric
from seg_carv_3_solver import Solver
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def main():
    # Train a Deeplab-style segmentation model on the Carvana dataset.
    # Relies on module-level `args` populated by argparse in the __main__ block.
    # NOTE(review): `args.gpus is ''` compares identity, not equality; it relies
    # on CPython small-string interning — confirm, `== ''` would be the safe test.
    devs = mx.cpu() if args.gpus is None or args.gpus is '' else [
        mx.gpu(int(i)) for i in args.gpus.split(',')]
    carvn_root = ''
    num_classes = 2
    # cutoff of 0 means "no cutoff" downstream
    cutoff = None if args.cutoff==0 else args.cutoff
    resize = True if args.resize else False
    epochs = [74,30,10,5]
    if not os.path.exists(args.model_dir):
        os.mkdir(args.model_dir)
    if 'Deeplab' in args.model:
        print "arg.model name is : ", args.model
        # derive the classification checkpoint prefix from the model name,
        # e.g. 'DeeplabV3-ResNeXt-...' -> 'CLS-ResNeXt-...'
        cls_model_prefix = '-'.join(['CLS'] + args.model.split('-')[1:])
        deeplabnet = irnext_deeplab_dcn(**vars(args))
        #deeplabnet = UNet_dcn(**vars(args))
        deeplabsym = deeplabnet.get_seg_symbol()
        model_prefix = args.model
        load_prefix = cls_model_prefix
        lr = 0.003
        run_epochs = 100
        load_epoch = 64
    else:
        raise Exception("error")
    arg_names = deeplabsym.list_arguments()
    print('loading', load_prefix, load_epoch)
    print('lr', lr)
    print('model_prefix', model_prefix)
    print('running epochs', run_epochs)
    print('cutoff size', cutoff)
    #args.batch_size = len(devs)
    if not args.retrain:
        # fresh training: initialize segmentation weights from the
        # classification checkpoint
        ctx = mx.cpu()
        _ , deeplab_args, deeplab_auxs = mx.model.load_checkpoint(load_prefix, load_epoch)
        data_shape_dict = {'data': (args.batch_size, 3, args.cutoff, args.cutoff),
                           'softmax_label': (args.batch_size, args.cutoff, args.cutoff)}
        deeplab_args, deeplab_auxs = seg_carv_7_init_from_cls.init_from_irnext_cls(ctx, \
            deeplabsym, deeplab_args, deeplab_auxs, data_shape_dict, block567=args.block567)
        #deeplab_args, deeplab_auxs = None, None
    else:
        # continue training: resume from the segmentation checkpoint itself
        ctx = mx.cpu()
        _ , deeplab_args, deeplab_auxs = mx.model.load_checkpoint(model_prefix, load_epoch)
    train_dataiter = BatchFileIter(
        path_imglist = "../../carvana_train.lst",
        cut_off_size = cutoff,
        resize = resize,
        rgb_mean = (123.68, 116.779, 103.939),
        batch_size = args.batch_size,
        )
    val_dataiter = BatchFileIter(
        path_imglist = "../../carvana_val.lst",
        cut_off_size = cutoff,
        resize = resize,
        rgb_mean = (123.68, 116.779, 103.939),
        batch_size = args.batch_size,
        )
    # learning rate
    kv = mx.kvstore.create('local')
    # create model
    model = mx.mod.Module(
        context = devs,
        symbol = deeplabsym,
        #label_names = ['softmax_label', 'softmax2_label']
    )
    optimizer_params = {
        'learning_rate': lr,
        'momentum' : 0.9,
        'wd' : 0.00005
        }
    # Dice coefficient is the evaluation metric for this binary segmentation task
    _dice = DiceMetric()
    eval_metrics = [mx.metric.create(_dice)]
    initializer = mx.init.Xavier(rnd_type='gaussian', factor_type="in", magnitude=2)
    model.fit(train_dataiter,
        begin_epoch = 0,
        num_epoch = run_epochs,
        eval_data = val_dataiter,
        eval_metric = eval_metrics,
        kvstore = kv,
        optimizer = 'sgd',
        optimizer_params = optimizer_params,
        initializer = initializer,
        arg_params = deeplab_args,
        aux_params = deeplab_auxs,
        batch_end_callback = mx.callback.Speedometer(args.batch_size, 20),
        epoch_end_callback = mx.callback.do_checkpoint(model_prefix),
        allow_missing = True)
if __name__ == "__main__":
    # Parse command-line options, seed the network hyper-parameters via
    # argparse defaults, then launch training through main().
    parser = argparse.ArgumentParser(description='Convert IRNeXt to Deeplabv3 model.')
    # Deeplab-ResNet Structure
    parser.set_defaults(
        # network
        network = 'irnext',
        num_layers = 74,
        outfeature = 2048,
        bottle_neck = 1,
        expansion = 4,
        num_group = 1,
        dilpat = 'DEEPLAB.PLATEAU',
        irv2 = False,
        deform = 1,
        sqex = 1,
        ratt = 0,
        block567 = '',
        aspp = 1,
        deeplabversion = 2,
        usemax = 0,
        taskmode = 'SEG',
        seg_stride_mode = '8x',
        batch_size = 8,
        # data
        num_classes = 2,
        #num_examples = 1281167,
        #image_shape = '3,224,224',
        #lastout = 7,
        #min_random_scale = 1.0 , # if input image has min size k, suggest to use
        # 256.0/x, e.g. 0.533 for 480
        # train
        #num_epochs = 80,
        #lr_step_epochs = '30,50,70',
        dtype = 'float32'
    )
    '''
    # UNet Structure
    parser.set_defaults(
        # network
        num_filter = 16,
        bottle_neck = 0,
        unitbatchnorm = True,
        deform = 0,
        sqex = 0,
        # data
        num_classes = 2,
        #num_examples = 1281167,
        #image_shape = '3,224,224',
        #lastout = 7,
        #min_random_scale = 1.0 , # if input image has min size k, suggest to use
        # 256.0/x, e.g. 0.533 for 480
        # train
        #num_epochs = 80,
        #lr_step_epochs = '30,50,70',
        batch_size = 16,
        dtype = 'float32'
    )
    '''
    parser.add_argument('--model', default='DeeplabV3-ResNeXt-152L64X1D4XP',
                        help='The type of DeeplabV3-ResNeXt model, e.g. DeeplabV3-ResNeXt-152L64X1D4XP, DeeplabV3-ResNeXt-50L96X4D1ov2XP')
    parser.add_argument('--model-dir', default='./model',
                        help='directory to save model.')
    parser.add_argument('--cutoff', type=int, default=1152,
                        help='cutoff size.')
    parser.add_argument('--resize', type=int, default=0,
                        help='cutoff size.')
    parser.add_argument('--gpus', default='',
                        help='gpus for use.')
    parser.add_argument('--retrain', action='store_true', default=False,
                        help='true means continue training.')
    args = parser.parse_args()
    logging.info(args)
    main()
|
deepinsight/Deformable-ConvNets
|
deeplab/runs_carv/seg_carv_4_train.py
|
Python
|
apache-2.0
| 6,827
|
[
"Gaussian"
] |
8168aaf2116e7a52742b8771d9fdd3a7908e78c2e1d4decc48fa1da55bfdb0a7
|
# Copyright 2002 by Tarjei Mikkelsen. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
# get set abstraction for graph representation
from Bio.Pathway.Rep.HashSet import *
class Graph:
    """A directed graph abstraction with labeled edges.

    This is a multigraph keyed by (parent, child) pairs: at most one edge
    may exist between a given ordered pair of nodes, and each edge carries
    exactly one label. Written for Python 2 (has_key, old raise syntax,
    builtin reduce).
    """
    def __init__(self, nodes = []):
        """Initializes a new Graph object."""
        # NOTE: the mutable default [] is safe here because nodes is only
        # iterated, never mutated.
        self.__adjacency_list = {} # maps parent -> set of child objects
        for n in nodes:
            self.__adjacency_list[n] = HashSet()
        self.__label_map = {} # maps label -> set of (parent, child) pairs
        self.__edge_map = {} # maps (parent, child) pair -> label
    def __eq__(self, g):
        """Returns true if g is equal to this graph."""
        return isinstance(g, Graph) and \
               (self.__adjacency_list == g.__adjacency_list) and \
               (self.__label_map == g.__label_map) and \
               (self.__edge_map == g.__edge_map)
    def __ne__(self, g):
        """Returns true if g is not equal to this graph."""
        return not self.__eq__(g)
    def __repr__(self):
        """Returns an unique string representation of this graph."""
        # nodes and edge values are sorted so the representation is canonical
        s = "<Graph: "
        keys = self.__adjacency_list.keys()
        keys.sort()
        for key in keys:
            values = [(x,self.__edge_map[(key,x)]) \
                      for x in self.__adjacency_list[key].list()]
            values.sort()
            s = s + "(" + repr(key) + ": " + ",".join(map(repr, values)) + ")"
        return s + ">"
    def __str__(self):
        """Returns a concise string description of this graph."""
        nodenum = len(self.__adjacency_list.keys())
        # total edge count = sum of out-degree over all nodes
        edgenum = reduce(lambda x,y: x+y,
                         map(len, self.__adjacency_list.values()))
        labelnum = len(self.__label_map.keys())
        return "<Graph: " + \
               str(nodenum) + " node(s), " + \
               str(edgenum) + " edge(s), " + \
               str(labelnum) + " unique label(s)>"
    def add_node(self, node):
        """Adds a node to this graph (no-op if already present)."""
        if not self.__adjacency_list.has_key(node):
            self.__adjacency_list[node] = HashSet()
    def add_edge(self, source, to, label = None):
        """Adds an edge to this graph.

        Both endpoints must already be nodes, and the (source, to) edge
        must not already exist; otherwise ValueError is raised.
        """
        if not self.__adjacency_list.has_key(source):
            raise ValueError, "Unknown <from> node: " + str(source)
        if not self.__adjacency_list.has_key(to):
            raise ValueError, "Unknown <to> node: " + str(to)
        if self.__edge_map.has_key((source,to)):
            raise ValueError, str(source) + " -> " + str(to) + " exists"
        self.__adjacency_list[source].add(to)
        if not self.__label_map.has_key(label):
            self.__label_map[label] = HashSet()
        self.__label_map[label].add((source,to))
        self.__edge_map[(source,to)] = label
    def child_edges(self, parent):
        """Returns a list of (child, label) pairs for parent."""
        if not self.__adjacency_list.has_key(parent):
            raise ValueError, "Unknown <parent> node: " + str(parent)
        return [(x, self.__edge_map[(parent,x)]) \
                for x in self.__adjacency_list[parent].list()]
    def children(self, parent):
        """Returns a list of unique children for parent."""
        return self.__adjacency_list[parent].list()
    def edges(self, label):
        """Returns a list of all the edges with this label."""
        if not self.__label_map.has_key(label):
            raise ValueError, "Unknown label: " + str(label)
        return self.__label_map[label].list()
    def labels(self):
        """Returns a list of all the edge labels in this graph."""
        return self.__label_map.keys()
    def nodes(self):
        """Returns a list of the nodes in this graph."""
        return self.__adjacency_list.keys()
    def parent_edges(self, child):
        """Returns a list of (parent, label) pairs for child.

        Scans the whole adjacency list; matches children by identity (is).
        """
        if not self.__adjacency_list.has_key(child):
            raise ValueError, "Unknown <child> node: " + str(child)
        parents = []
        for parent in self.__adjacency_list.keys():
            children = self.__adjacency_list[parent]
            for x in children.list():
                if x is child:
                    parents.append((parent, self.__edge_map[(parent, child)]))
        return parents
    def parents(self, child):
        """Returns a list of unique parents for child."""
        s = HashSet([x[0] for x in self.parent_edges(child)])
        return s.list()
    def remove_node(self, node):
        """Removes node and all edges connected to it."""
        if not self.__adjacency_list.has_key(node):
            raise ValueError, "Unknown node: " + str(node)
        # remove node (and all out-edges) from adjacency list
        del self.__adjacency_list[node]
        # remove all in-edges from adjacency list
        for n in self.__adjacency_list.keys():
            self.__adjacency_list[n] = HashSet(filter(lambda x,node=node: x is not node,
                                                      self.__adjacency_list[n].list()))
        # remove all referring pairs in label map
        for label in self.__label_map.keys():
            lm = HashSet(filter(lambda x,node=node: \
                                (x[0] is not node) and (x[1] is not node),
                                self.__label_map[label].list()))
            # remove the entry completely if the label is now unused
            if lm.empty():
                del self.__label_map[label]
            else:
                self.__label_map[label] = lm
        # remove all referring entries in edge map
        for edge in self.__edge_map.keys():
            if edge[0] is node or edge[1] is node:
                del self.__edge_map[edge]
    def remove_edge(self, parent, child, label):
        """Removes edge. -- NOT IMPLEMENTED"""
        # hm , this is a multigraph - how should this be implemented?
        raise NotImplementedError, "remove_edge is not yet implemented"
|
dbmi-pitt/DIKB-Micropublication
|
scripts/mp-scripts/Bio/Pathway/Rep/Graph.py
|
Python
|
apache-2.0
| 6,168
|
[
"Biopython"
] |
75c6142e76f2dafd02d748e9dcadb5467fdf4a6d03cc02e6ec5d0b3b0f69d660
|
# Copyright 2008-2011 by Peter Cock. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
Bio.AlignIO support for "fasta-m10" output from Bill Pearson's FASTA tools.
You are expected to use this module via the Bio.AlignIO functions (or the
Bio.SeqIO functions if you want to work directly with the gapped sequences).
This module contains a parser for the pairwise alignments produced by Bill
Pearson's FASTA tools, for use from the Bio.AlignIO interface where it is
refered to as the "fasta-m10" file format (as we only support the machine
readable output format selected with the -m 10 command line option).
This module does NOT cover the generic "fasta" file format originally
developed as an input format to the FASTA tools. The Bio.AlignIO and
Bio.SeqIO both use the Bio.SeqIO.FastaIO module to deal with these files,
which can also be used to store a multiple sequence alignments.
"""
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Align import MultipleSeqAlignment
from Interfaces import AlignmentIterator
from Bio.Alphabet import single_letter_alphabet, generic_dna, generic_protein
from Bio.Alphabet import Gapped
def _extract_alignment_region(alignment_seq_with_flanking, annotation):
"""Helper function for the main parsing code (PRIVATE).
To get the actual pairwise alignment sequences, we must first
translate the un-gapped sequence based coordinates into positions
in the gapped sequence (which may have a flanking region shown
using leading - characters). To date, I have never seen any
trailing flanking region shown in the m10 file, but the
following code should also cope with that.
Note that this code seems to work fine even when the "sq_offset"
entries are prsent as a result of using the -X command line option.
"""
align_stripped = alignment_seq_with_flanking.strip("-")
display_start = int(annotation['al_display_start'])
if int(annotation['al_start']) <= int(annotation['al_stop']):
start = int(annotation['al_start']) \
- display_start
end = int(annotation['al_stop']) \
- display_start + 1
else:
#FASTA has flipped this sequence...
start = display_start \
- int(annotation['al_start'])
end = display_start \
- int(annotation['al_stop']) + 1
end += align_stripped.count("-")
assert 0 <= start and start < end and end <= len(align_stripped), \
"Problem with sequence start/stop,\n%s[%i:%i]\n%s" \
% (alignment_seq_with_flanking, start, end, annotation)
return align_stripped[start:end]
def FastaM10Iterator(handle, alphabet = single_letter_alphabet):
"""Alignment iterator for the FASTA tool's pairwise alignment output.
This is for reading the pairwise alignments output by Bill Pearson's
FASTA program when called with the -m 10 command line option for machine
readable output. For more details about the FASTA tools, see the website
http://fasta.bioch.virginia.edu/ and the paper:
W.R. Pearson & D.J. Lipman PNAS (1988) 85:2444-2448
This class is intended to be used via the Bio.AlignIO.parse() function
by specifying the format as "fasta-m10" as shown in the following code:
from Bio import AlignIO
handle = ...
for a in AlignIO.parse(handle, "fasta-m10"):
assert len(a) == 2, "Should be pairwise!"
print "Alignment length %i" % a.get_alignment_length()
for record in a:
print record.seq, record.name, record.id
Note that this is not a full blown parser for all the information
in the FASTA output - for example, most of the header and all of the
footer is ignored. Also, the alignments are not batched according to
the input queries.
Also note that there can be up to about 30 letters of flanking region
included in the raw FASTA output as contextual information. This is NOT
part of the alignment itself, and is not included in the resulting
MultipleSeqAlignment objects returned.
"""
if alphabet is None:
alphabet = single_letter_alphabet
state_PREAMBLE = -1
state_NONE = 0
state_QUERY_HEADER = 1
state_ALIGN_HEADER = 2
state_ALIGN_QUERY = 3
state_ALIGN_MATCH = 4
state_ALIGN_CONS = 5
def build_hsp():
if not query_tags and not match_tags:
raise ValueError("No data for query %r, match %r" \
% (query_id, match_id))
assert query_tags, query_tags
assert match_tags, match_tags
evalue = align_tags.get("fa_expect", None)
q = "?" #Just for printing len(q) in debug below
m = "?" #Just for printing len(m) in debug below
tool = global_tags.get("tool", "").upper()
try:
q = _extract_alignment_region(query_seq, query_tags)
if tool in ["TFASTX"] and len(match_seq) == len(q):
m = match_seq
#Quick hack until I can work out how -, * and / characters
#and the apparent mix of aa and bp coordindates works.
else:
m = _extract_alignment_region(match_seq, match_tags)
assert len(q) == len(m)
except AssertionError, err:
print "Darn... amino acids vs nucleotide coordinates?"
print tool
print query_seq
print query_tags
print q, len(q)
print match_seq
print match_tags
print m, len(m)
print handle.name
raise err
assert alphabet is not None
alignment = MultipleSeqAlignment([], alphabet)
#TODO - Introduce an annotated alignment class?
#For now, store the annotation a new private property:
alignment._annotations = {}
#Want to record both the query header tags, and the alignment tags.
for key, value in header_tags.iteritems():
alignment._annotations[key] = value
for key, value in align_tags.iteritems():
alignment._annotations[key] = value
#Query
#=====
record = SeqRecord(Seq(q, alphabet),
id = query_id,
name = "query",
description = query_descr,
annotations = {"original_length" : int(query_tags["sq_len"])})
#TODO - handle start/end coordinates properly. Short term hack for now:
record._al_start = int(query_tags["al_start"])
record._al_stop = int(query_tags["al_stop"])
alignment.append(record)
#TODO - What if a specific alphabet has been requested?
#TODO - Use an IUPAC alphabet?
#TODO - Can FASTA output RNA?
if alphabet == single_letter_alphabet and "sq_type" in query_tags:
if query_tags["sq_type"] == "D":
record.seq.alphabet = generic_dna
elif query_tags["sq_type"] == "p":
record.seq.alphabet = generic_protein
if "-" in q:
if not hasattr(record.seq.alphabet,"gap_char"):
record.seq.alphabet = Gapped(record.seq.alphabet, "-")
#Match
#=====
record = SeqRecord(Seq(m, alphabet),
id = match_id,
name = "match",
description = match_descr,
annotations = {"original_length" : int(match_tags["sq_len"])})
#TODO - handle start/end coordinates properly. Short term hack for now:
record._al_start = int(match_tags["al_start"])
record._al_stop = int(match_tags["al_stop"])
alignment.append(record)
#This is still a very crude way of dealing with the alphabet:
if alphabet == single_letter_alphabet and "sq_type" in match_tags:
if match_tags["sq_type"] == "D":
record.seq.alphabet = generic_dna
elif match_tags["sq_type"] == "p":
record.seq.alphabet = generic_protein
if "-" in m:
if not hasattr(record.seq.alphabet,"gap_char"):
record.seq.alphabet = Gapped(record.seq.alphabet, "-")
return alignment
state = state_PREAMBLE
query_id = None
match_id = None
query_descr = ""
match_descr = ""
global_tags = {}
header_tags = {}
align_tags = {}
query_tags = {}
match_tags = {}
query_seq = ""
match_seq = ""
cons_seq = ""
for line in handle:
if ">>>" in line and not line.startswith(">>>"):
if query_id and match_id:
#This happens on old FASTA output which lacked an end of
#query >>><<< marker line.
yield build_hsp()
state = state_NONE
query_descr = line[line.find(">>>")+3:].strip()
query_id = query_descr.split(None,1)[0]
match_id = None
header_tags = {}
align_tags = {}
query_tags = {}
match_tags = {}
query_seq = ""
match_seq = ""
cons_seq = ""
elif line.startswith("!! No "):
#e.g.
#!! No library sequences with E() < 0.5
#or on more recent versions,
#No sequences with E() < 0.05
assert state == state_NONE
assert not header_tags
assert not align_tags
assert not match_tags
assert not query_tags
assert match_id is None
assert not query_seq
assert not match_seq
assert not cons_seq
query_id = None
elif line.strip() in [">>><<<", ">>>///"]:
#End of query, possible end of all queries
if query_id and match_id:
yield build_hsp()
state = state_NONE
query_id = None
match_id = None
header_tags = {}
align_tags = {}
query_tags = {}
match_tags = {}
query_seq = ""
match_seq = ""
cons_seq = ""
elif line.startswith(">>>"):
#Should be start of a match!
assert query_id is not None
assert line[3:].split(", ",1)[0] == query_id, line
assert match_id is None
assert not header_tags
assert not align_tags
assert not query_tags
assert not match_tags
assert not match_seq
assert not query_seq
assert not cons_seq
state = state_QUERY_HEADER
elif line.startswith(">>"):
#Should now be at start of a match alignment!
if query_id and match_id:
yield build_hsp()
align_tags = {}
query_tags = {}
match_tags = {}
query_seq = ""
match_seq = ""
cons_seq = ""
match_descr = line[2:].strip()
match_id = match_descr.split(None,1)[0]
state = state_ALIGN_HEADER
elif line.startswith(">--"):
#End of one HSP
assert query_id and match_id, line
yield build_hsp()
#Clean up read for next HSP
#but reuse header_tags
align_tags = {}
query_tags = {}
match_tags = {}
query_seq = ""
match_seq = ""
cons_seq = ""
state = state_ALIGN_HEADER
elif line.startswith(">"):
if state == state_ALIGN_HEADER:
#Should be start of query alignment seq...
assert query_id is not None, line
assert match_id is not None, line
assert query_id.startswith(line[1:].split(None,1)[0]), line
state = state_ALIGN_QUERY
elif state == state_ALIGN_QUERY:
#Should be start of match alignment seq
assert query_id is not None, line
assert match_id is not None, line
assert match_id.startswith(line[1:].split(None,1)[0]), line
state = state_ALIGN_MATCH
elif state == state_NONE:
#Can get > as the last line of a histogram
pass
else:
assert False, "state %i got %r" % (state, line)
elif line.startswith("; al_cons"):
assert state == state_ALIGN_MATCH, line
state = state_ALIGN_CONS
#Next line(s) should be consensus seq...
elif line.startswith("; "):
if ": " in line:
key, value = [s.strip() for s in line[2:].split(": ",1)]
else:
import warnings
#Seen in lalign36, specifically version 36.3.4 Apr, 2011
#Fixed in version 36.3.5b Oct, 2011(preload8)
warnings.warn("Missing colon in line: %r" % line)
try:
key, value = [s.strip() for s in line[2:].split(" ",1)]
except ValueError:
raise ValueError("Bad line: %r" % line)
if state == state_QUERY_HEADER:
header_tags[key] = value
elif state == state_ALIGN_HEADER:
align_tags[key] = value
elif state == state_ALIGN_QUERY:
query_tags[key] = value
elif state == state_ALIGN_MATCH:
match_tags[key] = value
else:
assert False, "Unexpected state %r, %r" % (state, line)
elif state == state_ALIGN_QUERY:
query_seq += line.strip()
elif state == state_ALIGN_MATCH:
match_seq += line.strip()
elif state == state_ALIGN_CONS:
cons_seq += line.strip("\n")
elif state == state_PREAMBLE:
if line.startswith("#"):
global_tags["command"] = line[1:].strip()
elif line.startswith(" version "):
global_tags["version"] = line[9:].strip()
elif " compares a " in line:
global_tags["tool"] = line[:line.find(" compares a ")].strip()
elif " searches a " in line:
global_tags["tool"] = line[:line.find(" searches a ")].strip()
else:
pass
if __name__ == "__main__":
print "Running a quick self-test"
#http://emboss.sourceforge.net/docs/themes/alnformats/align.simple
simple_example = \
"""# /opt/fasta/fasta34 -Q -H -E 1 -m 10 NC_002127.faa NC_009649.faa
FASTA searches a protein or DNA sequence data bank
version 34.26 January 12, 2007
Please cite:
W.R. Pearson & D.J. Lipman PNAS (1988) 85:2444-2448
Query library NC_002127.faa vs NC_009649.faa library
searching NC_009649.faa library
1>>>gi|10955263|ref|NP_052604.1| plasmid mobilization [Escherichia coli O157:H7 s 107 aa - 107 aa
vs NC_009649.faa library
45119 residues in 180 sequences
Expectation_n fit: rho(ln(x))= 6.9146+/-0.0249; mu= -5.7948+/- 1.273
mean_var=53.6859+/-13.609, 0's: 0 Z-trim: 1 B-trim: 9 in 1/25
Lambda= 0.175043
FASTA (3.5 Sept 2006) function [optimized, BL50 matrix (15:-5)] ktup: 2
join: 36, opt: 24, open/ext: -10/-2, width: 16
Scan time: 0.000
The best scores are: opt bits E(180)
gi|152973457|ref|YP_001338508.1| ATPase with chape ( 931) 71 24.9 0.58
gi|152973588|ref|YP_001338639.1| F pilus assembly ( 459) 63 23.1 0.99
>>>gi|10955263|ref|NP_052604.1|, 107 aa vs NC_009649.faa library
; pg_name: /opt/fasta/fasta34
; pg_ver: 34.26
; pg_argv: /opt/fasta/fasta34 -Q -H -E 1 -m 10 NC_002127.faa NC_009649.faa
; pg_name: FASTA
; pg_ver: 3.5 Sept 2006
; pg_matrix: BL50 (15:-5)
; pg_open-ext: -10 -2
; pg_ktup: 2
; pg_optcut: 24
; pg_cgap: 36
; mp_extrap: 60000 180
; mp_stats: Expectation_n fit: rho(ln(x))= 6.9146+/-0.0249; mu= -5.7948+/- 1.273 mean_var=53.6859+/-13.609, 0's: 0 Z-trim: 1 B-trim: 9 in 1/25 Lambda= 0.175043
; mp_KS: -0.0000 (N=0) at 8159228
>>gi|152973457|ref|YP_001338508.1| ATPase with chaperone activity, ATP-binding subunit [Klebsiella pneumoniae subsp. pneumoniae MGH 78578]
; fa_frame: f
; fa_initn: 65
; fa_init1: 43
; fa_opt: 71
; fa_z-score: 90.3
; fa_bits: 24.9
; fa_expect: 0.58
; sw_score: 71
; sw_ident: 0.250
; sw_sim: 0.574
; sw_overlap: 108
>gi|10955263| ..
; sq_len: 107
; sq_offset: 1
; sq_type: p
; al_start: 5
; al_stop: 103
; al_display_start: 1
--------------------------MTKRSGSNT-RRRAISRPVRLTAE
ED---QEIRKRAAECGKTVSGFLRAAALGKKVNSLTDDRVLKEVM-----
RLGALQKKLFIDGKRVGDREYAEVLIAITEYHRALLSRLMAD
>gi|152973457|ref|YP_001338508.1| ..
; sq_len: 931
; sq_type: p
; al_start: 96
; al_stop: 195
; al_display_start: 66
SDFFRIGDDATPVAADTDDVVDASFGEPAAAGSGAPRRRGSGLASRISEQ
SEALLQEAAKHAAEFGRS------EVDTEHLLLALADSDVVKTILGQFKI
KVDDLKRQIESEAKR-GDKPF-EGEIGVSPRVKDALSRAFVASNELGHSY
VGPEHFLIGLAEEGEGLAANLLRRYGLTPQ
>>gi|152973588|ref|YP_001338639.1| F pilus assembly protein [Klebsiella pneumoniae subsp. pneumoniae MGH 78578]
; fa_frame: f
; fa_initn: 33
; fa_init1: 33
; fa_opt: 63
; fa_z-score: 86.1
; fa_bits: 23.1
; fa_expect: 0.99
; sw_score: 63
; sw_ident: 0.266
; sw_sim: 0.656
; sw_overlap: 64
>gi|10955263| ..
; sq_len: 107
; sq_offset: 1
; sq_type: p
; al_start: 32
; al_stop: 94
; al_display_start: 2
TKRSGSNTRRRAISRPVRLTAEEDQEIRKRAAECGKTVSGFLRAAALGKK
VNSLTDDRVLKEV-MRLGALQKKLFIDGKRVGDREYAEVLIAITEYHRAL
LSRLMAD
>gi|152973588|ref|YP_001338639.1| ..
; sq_len: 459
; sq_type: p
; al_start: 191
; al_stop: 248
; al_display_start: 161
VGGLFPRTQVAQQKVCQDIAGESNIFSDWAASRQGCTVGG--KMDSVQDK
ASDKDKERVMKNINIMWNALSKNRLFDG----NKELKEFIMTLTGTLIFG
ENSEITPLPARTTDQDLIRAMMEGGTAKIYHCNDSDKCLKVVADATVTIT
SNKALKSQISALLSSIQNKAVADEKLTDQE
2>>>gi|10955264|ref|NP_052605.1| hypothetical protein pOSAK1_02 [Escherichia coli O157:H7 s 126 aa - 126 aa
vs NC_009649.faa library
45119 residues in 180 sequences
Expectation_n fit: rho(ln(x))= 7.1374+/-0.0246; mu= -7.6540+/- 1.313
mean_var=51.1189+/-13.171, 0's: 0 Z-trim: 1 B-trim: 8 in 1/25
Lambda= 0.179384
FASTA (3.5 Sept 2006) function [optimized, BL50 matrix (15:-5)] ktup: 2
join: 36, opt: 24, open/ext: -10/-2, width: 16
Scan time: 0.000
The best scores are: opt bits E(180)
gi|152973462|ref|YP_001338513.1| hypothetical prot ( 101) 58 22.9 0.29
>>>gi|10955264|ref|NP_052605.1|, 126 aa vs NC_009649.faa library
; pg_name: /opt/fasta/fasta34
; pg_ver: 34.26
; pg_argv: /opt/fasta/fasta34 -Q -H -E 1 -m 10 NC_002127.faa NC_009649.faa
; pg_name: FASTA
; pg_ver: 3.5 Sept 2006
; pg_matrix: BL50 (15:-5)
; pg_open-ext: -10 -2
; pg_ktup: 2
; pg_optcut: 24
; pg_cgap: 36
; mp_extrap: 60000 180
; mp_stats: Expectation_n fit: rho(ln(x))= 7.1374+/-0.0246; mu= -7.6540+/- 1.313 mean_var=51.1189+/-13.171, 0's: 0 Z-trim: 1 B-trim: 8 in 1/25 Lambda= 0.179384
; mp_KS: -0.0000 (N=0) at 8159228
>>gi|152973462|ref|YP_001338513.1| hypothetical protein KPN_pKPN3p05904 [Klebsiella pneumoniae subsp. pneumoniae MGH 78578]
; fa_frame: f
; fa_initn: 50
; fa_init1: 50
; fa_opt: 58
; fa_z-score: 95.8
; fa_bits: 22.9
; fa_expect: 0.29
; sw_score: 58
; sw_ident: 0.289
; sw_sim: 0.632
; sw_overlap: 38
>gi|10955264| ..
; sq_len: 126
; sq_offset: 1
; sq_type: p
; al_start: 1
; al_stop: 38
; al_display_start: 1
------------------------------MKKDKKYQIEAIKNKDKTLF
IVYATDIYSPSEFFSKIESDLKKKKSKGDVFFDLIIPNGGKKDRYVYTSF
NGEKFSSYTLNKVTKTDEYN
>gi|152973462|ref|YP_001338513.1| ..
; sq_len: 101
; sq_type: p
; al_start: 44
; al_stop: 81
; al_display_start: 14
DALLGEIQRLRKQVHQLQLERDILTKANELIKKDLGVSFLKLKNREKTLI
VDALKKKYPVAELLSVLQLARSCYFYQNVCTISMRKYA
3>>>gi|10955265|ref|NP_052606.1| hypothetical protein pOSAK1_03 [Escherichia coli O157:H7 s 346 aa - 346 aa
vs NC_009649.faa library
45119 residues in 180 sequences
Expectation_n fit: rho(ln(x))= 6.0276+/-0.0276; mu= 3.0670+/- 1.461
mean_var=37.1634+/- 8.980, 0's: 0 Z-trim: 1 B-trim: 14 in 1/25
Lambda= 0.210386
FASTA (3.5 Sept 2006) function [optimized, BL50 matrix (15:-5)] ktup: 2
join: 37, opt: 25, open/ext: -10/-2, width: 16
Scan time: 0.020
The best scores are: opt bits E(180)
gi|152973545|ref|YP_001338596.1| putative plasmid ( 242) 70 27.5 0.082
>>>gi|10955265|ref|NP_052606.1|, 346 aa vs NC_009649.faa library
; pg_name: /opt/fasta/fasta34
; pg_ver: 34.26
; pg_argv: /opt/fasta/fasta34 -Q -H -E 1 -m 10 NC_002127.faa NC_009649.faa
; pg_name: FASTA
; pg_ver: 3.5 Sept 2006
; pg_matrix: BL50 (15:-5)
; pg_open-ext: -10 -2
; pg_ktup: 2
; pg_optcut: 25
; pg_cgap: 37
; mp_extrap: 60000 180
; mp_stats: Expectation_n fit: rho(ln(x))= 6.0276+/-0.0276; mu= 3.0670+/- 1.461 mean_var=37.1634+/- 8.980, 0's: 0 Z-trim: 1 B-trim: 14 in 1/25 Lambda= 0.210386
; mp_KS: -0.0000 (N=0) at 8159228
>>gi|152973545|ref|YP_001338596.1| putative plasmid SOS inhibition protein A [Klebsiella pneumoniae subsp. pneumoniae MGH 78578]
; fa_frame: f
; fa_initn: 52
; fa_init1: 52
; fa_opt: 70
; fa_z-score: 105.5
; fa_bits: 27.5
; fa_expect: 0.082
; sw_score: 70
; sw_ident: 0.279
; sw_sim: 0.651
; sw_overlap: 43
>gi|10955265| ..
; sq_len: 346
; sq_offset: 1
; sq_type: p
; al_start: 197
; al_stop: 238
; al_display_start: 167
DFMCSILNMKEIVEQKNKEFNVDIKKETIESELHSKLPKSIDKIHEDIKK
QLSC-SLIMKKIDVEMEDYSTYCFSALRAIEGFIYQILNDVCNPSSSKNL
GEYFTENKPKYIIREIHQET
>gi|152973545|ref|YP_001338596.1| ..
; sq_len: 242
; sq_type: p
; al_start: 52
; al_stop: 94
; al_display_start: 22
IMTVEEARQRGARLPSMPHVRTFLRLLTGCSRINSDVARRIPGIHRDPKD
RLSSLKQVEEALDMLISSHGEYCPLPLTMDVQAENFPEVLHTRTVRRLKR
QDFAFTRKMRREARQVEQSW
>>><<<
579 residues in 3 query sequences
45119 residues in 180 library sequences
Scomplib [34.26]
start: Tue May 20 16:38:45 2008 done: Tue May 20 16:38:45 2008
Total Scan time: 0.020 Total Display time: 0.010
Function used was FASTA [version 34.26 January 12, 2007]
"""
from StringIO import StringIO
alignments = list(FastaM10Iterator(StringIO(simple_example)))
assert len(alignments) == 4, len(alignments)
assert len(alignments[0]) == 2
for a in alignments:
print "Alignment %i sequences of length %i" \
% (len(a), a.get_alignment_length())
for r in a:
print "%s %s %i" % (r.seq, r.id, r.annotations["original_length"])
#print a.annotations
print "Done"
import os
path = "../../Tests/Fasta/"
files = [f for f in os.listdir(path) if os.path.splitext(f)[-1] == ".m10"]
files.sort()
for filename in files:
if os.path.splitext(filename)[-1] == ".m10":
print
print filename
print "="*len(filename)
for i,a in enumerate(FastaM10Iterator(open(os.path.join(path,filename)))):
print "#%i, %s" % (i+1,a)
for r in a:
if "-" in r.seq:
assert r.seq.alphabet.gap_char == "-"
else:
assert not hasattr(r.seq.alphabet, "gap_char")
|
bryback/quickseq
|
genescript/Bio/AlignIO/FastaIO.py
|
Python
|
mit
| 23,159
|
[
"Biopython"
] |
5715a40f55bf58a8e465adbe4a5d373c376775e45d0251bf2368f50d87e088b6
|
import json
import sys
from copy import copy
from decimal import Decimal, InvalidOperation
from math import isclose, isfinite
from typing import Type
from zlib import compress
import pytest
from conftest import needs_libqpdf_v, skip_if_pypy
from hypothesis import assume, example, given
from hypothesis.strategies import (
binary,
booleans,
characters,
floats,
integers,
lists,
recursive,
)
import pikepdf
from pikepdf import (
Array,
Dictionary,
Name,
Object,
Operator,
Pdf,
PdfError,
Stream,
String,
)
from pikepdf import _qpdf as qpdf
from pikepdf.models import parse_content_stream
# pylint: disable=eval-used, redefined-outer-name
encode = qpdf._encode
def test_none():
assert encode(None) is None
def test_booleans():
assert encode(True) == True # noqa: E712
assert encode(False) == False # noqa: E712
@given(characters(min_codepoint=0x20, max_codepoint=0x7F))
@example('')
def test_ascii_involution(ascii_):
b = ascii_.encode('ascii')
assert encode(b) == b
@given(
characters(min_codepoint=0x0, max_codepoint=0xFEF0, blacklist_categories=('Cs',))
)
@example('')
def test_unicode_involution(s):
assert str(encode(s)) == s
@given(characters(whitelist_categories=('Cs',)))
def test_unicode_fails(s):
with pytest.raises(UnicodeEncodeError):
encode(s)
@given(binary(min_size=0, max_size=300))
def test_binary_involution(binary_):
assert bytes(encode(binary_)) == binary_
int64s = integers(min_value=-9223372036854775807, max_value=9223372036854775807)
@given(int64s, int64s)
def test_integer_comparison(a, b):
equals = a == b
encoded_equals = encode(a) == encode(b)
assert encoded_equals == equals
lessthan = a < b
encoded_lessthan = encode(a) < encode(b)
assert lessthan == encoded_lessthan
@given(integers(-(10 ** 12), 10 ** 12), integers(0, 12))
def test_decimal_involution(num, radix):
strnum = str(num)
if radix > len(strnum):
strnum = strnum[:radix] + '.' + strnum[radix:]
d = Decimal(strnum)
assert encode(d) == d
@given(floats())
def test_decimal_from_float(f):
d = Decimal(f)
if isfinite(f) and d.is_finite():
try:
# PDF is limited to ~5 sig figs
decstr = str(d.quantize(Decimal('1.000000')))
except InvalidOperation:
return # PDF doesn't support exponential notation
try:
py_d = Object.parse(decstr)
except RuntimeError as e:
if 'overflow' in str(e) or 'underflow' in str(e):
py_d = Object.parse(str(f))
assert isclose(py_d, d, abs_tol=1e-5), (d, f.hex())
else:
with pytest.raises(PdfError):
Object.parse(str(d))
@skip_if_pypy
def test_stack_depth():
a = [42]
for _ in range(100):
a = [a]
rlimit = sys.getrecursionlimit()
try:
sys.setrecursionlimit(100)
with pytest.raises(RecursionError):
assert encode(a) == a
with pytest.raises(RecursionError):
assert encode(a) == encode(a) # pylint: disable=expression-not-assigned
with pytest.raises(RecursionError):
repr(a)
finally:
sys.setrecursionlimit(rlimit) # So other tests are not affected
def test_bytes():
b = b'\x79\x78\x77\x76'
qs = String(b)
assert bytes(qs) == b
s = 'é'
qs = String(s)
assert str(qs) == s
assert Name('/xyz') == b'/xyz'
with pytest.raises(TypeError, match='should be str'):
Name(b'/bytes')
class TestArray:
def test_len_array(self):
assert len(Array([])) == 0
assert len(Array()) == 0
assert len(Array([3])) == 1
def test_wrap_array(self):
assert Name('/Foo').wrap_in_array() == Array([Name('/Foo')])
assert Array([42]).wrap_in_array() == Array([42])
@given(lists(integers(-10, 10), min_size=0, max_size=10))
def test_list(self, array):
a = pikepdf.Array(array)
assert a == array
@given(
lists(lists(integers(1, 10), min_size=1, max_size=5), min_size=1, max_size=5)
)
def test_nested_list(self, array):
a = pikepdf.Array(array)
assert a == array
@given(
recursive(
integers(1, 10) | booleans(),
lambda children: lists(children), # pylint: disable=unnecessary-lambda
max_leaves=20,
)
)
def test_nested_list2(self, array):
assume(isinstance(array, list))
a = pikepdf.Array(array)
assert a == array
def test_array_of_array(self):
a = Array([1, 2])
a2 = Array(a)
assert a == a2
assert a is not a2
def test_array_of_primitives_eq(self):
a = Array([True, False, 0, 1, 42, 42.42])
b = Array([True, False, 0, 1, 42, 42.42])
assert a == b
c = Array([1.0, 0.0, 0.0, 1.0, 42.0, 42.42])
assert a == c
def test_list_apis(self):
a = pikepdf.Array([1, 2, 3])
a[1] = None
assert a[1] is None
assert len(a) == 3
del a[1]
assert len(a) == 2
a[-1] = Name('/Foo')
with pytest.raises(IndexError):
a[-5555] = Name.Foo
assert a == pikepdf.Array([1, Name.Foo])
a.append(4)
assert a == pikepdf.Array([1, Name.Foo, 4])
a.extend([42, 666])
assert a == pikepdf.Array([1, Name.Foo, 4, 42, 666])
with pytest.raises(ValueError, match='object is not a dictionary'):
del a.ImaginaryKey
with pytest.raises(TypeError, match=r"items\(\) not available"):
a.items()
def test_array_contains(self):
a = pikepdf.Array([Name.One, Name.Two])
assert Name.One in a
assert Name.Two in a
assert Name.N not in a
a = pikepdf.Array([1, 2, 3])
assert 1 in a
assert 3 in a
assert 42 not in a
with pytest.raises(TypeError):
assert 'forty two' not in a
with pytest.raises(TypeError):
assert b'forty two' not in a
assert pikepdf.String('forty two') not in a
a = pikepdf.Array(['1234', b'\x80\x81\x82'])
assert pikepdf.String('1234') in a
assert pikepdf.String(b'\x80\x81\x82') in a
def test_no_len():
with pytest.raises(TypeError):
len(Name.Foo)
len(String('abc'))
class TestName:
def test_name_equality(self):
# Who needs transitivity? :P
# While this is less than ideal ('/Foo' != b'/Foo') it allows for slightly
# sloppy tests like if colorspace == '/Indexed' without requiring
# Name('/Indexed') everywhere
assert Name('/Foo') == '/Foo'
assert Name('/Foo') == b'/Foo'
assert Name.Foo == Name('/Foo')
def test_unslashed_name(self):
with pytest.raises(ValueError, match='must begin with'):
assert Name('Monty') not in [] # pylint: disable=expression-not-assigned
def test_empty_name(self):
with pytest.raises(ValueError):
Name('')
with pytest.raises(ValueError):
Name('/')
def test_forbidden_name_usage(self):
with pytest.raises(AttributeError, match="may not be set on pikepdf.Name"):
Name.Monty = Name.Python
with pytest.raises(TypeError, match="not subscriptable"):
Name['/Monty'] # pylint: disable=pointless-statement
if sys.implementation.name == 'pypy':
pytest.xfail(reason="pypy seems to do setattr differently")
with pytest.raises(AttributeError, match="has no attribute"):
monty = Name.Monty
monty.Attribute = 42
def test_bytes_of_name(self):
assert bytes(Name.ABC) == b'/ABC'
def test_name_from_name(self):
foo = Name('/Foo')
assert Name(foo) == foo
class TestHashViolation:
def check(self, a, b):
assert a == b, "invalid test case"
assert hash(a) == hash(b), "hash violation"
def test_unequal_but_similar(self):
assert Name('/Foo') != String('/Foo')
def test_numbers(self):
self.check(Object.parse('1.0'), 1)
self.check(Object.parse('42'), 42)
def test_bool_comparison(self):
self.check(Object.parse('0.0'), False)
self.check(True, 1)
def test_string(self):
utf16 = b'\xfe\xff' + 'hello'.encode('utf-16be')
self.check(String(utf16), String('hello'))
def test_name(self):
self.check(Name.This, Name('/This'))
def test_operator(self):
self.check(Operator('q'), Operator('q'))
def test_array_not_hashable(self):
with pytest.raises(TypeError):
{Array([3]): None} # pylint: disable=expression-not-assigned
def test_not_constructible():
with pytest.raises(TypeError, match="constructor"):
Object()
class TestRepr:
def test_repr_dict(self):
d = Dictionary(
{
'/Boolean': True,
'/Integer': 42,
'/Real': Decimal('42.42'),
'/String': String('hi'),
'/Array': Array([1, 2, 3.14]),
'/Operator': Operator('q'),
'/Dictionary': Dictionary({'/Color': 'Red'}),
'/None': None,
}
)
short_pi = '3.14'
expected = (
"""\
pikepdf.Dictionary({
"/Array": [ 1, 2, Decimal('%s') ],
"/Boolean": True,
"/Dictionary": {
"/Color": "Red"
},
"/Integer": 42,
"/None": None,
"/Operator": pikepdf.Operator("q"),
"/Real": Decimal('42.42'),
"/String": "hi"
})
"""
% short_pi
)
def strip_all_whitespace(s):
return ''.join(s.split())
assert strip_all_whitespace(repr(d)) == strip_all_whitespace(expected)
assert eval(repr(d)) == d
def test_repr_scalar(self):
scalars = [
False,
666,
Decimal('3.14'),
String('scalar'),
Name('/Bob'),
Operator('Q'),
]
for s in scalars:
assert eval(repr(s)) == s
def test_repr_indirect(self, resources):
with pikepdf.open(resources / 'graph.pdf') as graph:
repr_page0 = repr(graph.pages[0])
assert repr_page0[0] == '<', 'should not be constructible'
def test_repr_circular(self):
with pikepdf.new() as pdf:
pdf.Root.Circular = pdf.make_indirect(Dictionary())
pdf.Root.Circular.Parent = pdf.make_indirect(Dictionary())
pdf.Root.Circular.Parent = pdf.make_indirect(pdf.Root.Circular)
assert '.get_object' in repr(pdf.Root.Circular)
def test_repr_indirect_page(self, resources):
with pikepdf.open(resources / 'outlines.pdf') as outlines:
assert 'from_objgen' in repr(outlines.Root.Pages.Kids)
# An indirect page reference in the Dests name tree
assert 'from_objgen' in repr(outlines.Root.Names.Dests.Kids[0].Names[1])
def test_operator_inline(resources):
with pikepdf.open(resources / 'image-mono-inline.pdf') as pdf:
instructions = parse_content_stream(pdf.pages[0], operators='BI ID EI')
assert len(instructions) == 1
_operands, operator = instructions[0]
assert operator == pikepdf.Operator("INLINE IMAGE")
def test_utf16_error():
with pytest.raises((UnicodeEncodeError, RuntimeError)):
str(encode('\ud801'))
class TestDictionary:
def test_contains(self):
d = Dictionary({'/Monty': 'Python', '/Flying': 'Circus'})
assert Name.Flying in d
assert Name('/Monty') in d
assert Name.Brian not in d
def test_none(self):
d = pikepdf.Dictionary({'/One': 1, '/Two': 2})
with pytest.raises(ValueError):
d['/Two'] = None
def test_init(self):
d1 = pikepdf.Dictionary({'/Animal': 'Dog'})
d2 = pikepdf.Dictionary(Animal='Dog')
assert d1 == d2
def test_kwargs(self):
d = pikepdf.Dictionary(A='a', B='b', C='c')
assert '/B' in d
assert 'B' in dir(d)
def test_iter(self):
d = pikepdf.Dictionary(A='a')
for k in d:
assert k == '/A'
assert d[k] == 'a'
def test_items(self):
d = pikepdf.Dictionary(A='a')
for _k in d.items():
pass
def test_str(self):
d = pikepdf.Dictionary(A='a')
with pytest.raises(NotImplementedError):
str(d)
def test_attr(self):
d = pikepdf.Dictionary(A='a')
with pytest.raises(AttributeError):
d.invalidname # pylint: disable=pointless-statement
def test_get(self):
d = pikepdf.Dictionary(A='a')
assert d.get(Name.A) == 'a'
assert d.get(Name.Resources, 42) == 42
def test_bad_name_init(self):
with pytest.raises(KeyError, match=r"must begin with '/'"):
pikepdf.Dictionary({'/Slash': 'dot', 'unslash': 'error'})
with pytest.raises(KeyError, match=r"must begin with '/'"):
pikepdf.Dictionary({'/': 'slash'})
def test_bad_name_set(self):
d = pikepdf.Dictionary()
d['/Slash'] = 'dot'
with pytest.raises(KeyError, match=r"must begin with '/'"):
d['unslash'] = 'error'
with pytest.raises(KeyError, match=r"may not be '/'"):
d['/'] = 'error'
def test_del_missing_key(self):
d = pikepdf.Dictionary(A='a')
with pytest.raises(KeyError):
del d.B
def test_int_access(self):
d = pikepdf.Dictionary()
with pytest.raises(TypeError, match="not an array"):
d[0] = 3
def test_wrong_contains_type(self):
d = pikepdf.Dictionary()
with pytest.raises(TypeError, match="can only contain Names"):
assert pikepdf.Array([3]) not in d
def test_dict_bad_params(self):
with pytest.raises(ValueError):
Dictionary({'/Foo': 1}, Bar=2)
def test_dict_of_dict(self):
d = Dictionary(One=1, Two=2)
d2 = Dictionary(d)
assert d == d2
assert d is not d2
def test_not_convertible():
class PurePythonObj:
def __repr__(self):
return 'PurePythonObj()'
c = PurePythonObj()
with pytest.raises(RuntimeError):
encode(c)
with pytest.raises(RuntimeError):
pikepdf.Array([1, 2, c])
d = pikepdf.Dictionary()
with pytest.raises(RuntimeError):
d.SomeKey = c
assert d != c
def test_json():
d = Dictionary(
{
'/Boolean': True,
'/Integer': 42,
'/Real': Decimal('42.42'),
'/String': String('hi'),
'/Array': Array([1, 2, 3.14]),
'/Dictionary': Dictionary({'/Color': 'Red'}),
}
)
json_bytes = d.to_json(False)
as_dict = json.loads(json_bytes)
assert as_dict == {
"/Array": [1, 2, 3.14],
"/Boolean": True,
"/Dictionary": {"/Color": "Red"},
"/Integer": 42,
"/Real": 42.42,
"/String": "hi",
}
class TestStream:
@pytest.fixture(scope="function")
def abcxyz_stream(self):
pdf = pikepdf.new()
data = b'abcxyz'
stream = Stream(pdf, data)
return stream
def test_stream_isinstance(self):
pdf = pikepdf.new()
stream = Stream(pdf, b'xyz')
assert isinstance(stream, Stream)
assert isinstance(stream, Object)
def test_stream_as_dict(self, abcxyz_stream):
stream = abcxyz_stream
assert Name.Length in stream
stream.TestAttrAccess = True
stream['/TestKeyAccess'] = True
stream[Name.TestKeyNameAccess] = True
assert len(stream.keys()) == 4 # Streams always have a /Length
assert all(
(v == len(stream.read_bytes()) or v == True) # noqa: E712
for k, v in stream.items()
)
assert stream.stream_dict.TestAttrAccess
assert stream.get(Name.MissingName, 3.14) == 3.14
assert {k for k in stream} == {
'/TestKeyAccess',
'/TestAttrAccess',
'/Length',
'/TestKeyNameAccess',
}
def test_stream_length_modify(self, abcxyz_stream):
stream = abcxyz_stream
with pytest.raises(KeyError):
stream.Length = 42
with pytest.raises(KeyError):
del stream.Length
def test_len_stream(self, abcxyz_stream):
with pytest.raises(TypeError):
len(abcxyz_stream) # pylint: disable=pointless-statement
assert len(abcxyz_stream.stream_dict) == 1
def test_stream_dict_oneshot(self):
pdf = pikepdf.new()
stream1 = Stream(pdf, b'12345', One=1, Two=2)
stream2 = Stream(pdf, b'67890', {'/Three': 3, '/Four': 4})
stream3 = pdf.make_stream(b'abcdef', One=1, Two=2)
assert stream1.One == 1
assert stream1.read_bytes() == b'12345'
assert stream2.Three == 3
assert stream3.One == 1
def test_stream_bad_params(self):
p = pikepdf.new()
with pytest.raises(TypeError, match='data'):
Stream(p)
def test_stream_no_dangling_stream_on_failure(self):
p = pikepdf.new()
num_objects = len(p.objects)
with pytest.raises(AttributeError):
Stream(p, b'3.14159', ['Not a mapping object'])
assert len(p.objects) == num_objects, "A dangling object was created"
def test_identical_streams_equal(self):
pdf = pikepdf.new()
stream1 = Stream(pdf, b'12345', One=1, Two=2)
stream2 = Stream(pdf, b'67890', {'/Three': 3, '/Four': 4})
assert stream1 == stream1
assert stream1 != stream2
def test_stream_data_equal(self):
pdf1 = pikepdf.new()
stream1 = Stream(pdf1, b'abc')
pdf2 = pikepdf.new()
stream2 = Stream(pdf2, b'abc')
stream21 = Stream(pdf2, b'abcdef')
assert stream1 == stream2
assert stream21 != stream2
stream2.stream_dict.SomeData = 1
assert stream2 != stream1
@pytest.fixture
def sandwich(resources):
with Pdf.open(resources / 'sandwich.pdf') as pdf:
yield pdf
class TestStreamReadWrite:
@pytest.fixture
def stream_object(self):
pdf = pikepdf.new()
return Stream(pdf, b'')
def test_basic(self, stream_object):
stream_object.write(b'abc')
assert stream_object.read_bytes() == b'abc'
def test_compressed_readback(self, stream_object):
stream_object.write(compress(b'def'), filter=Name.FlateDecode)
assert stream_object.read_bytes() == b'def'
def test_stacked_compression(self, stream_object):
double_compressed = compress(compress(b'pointless'))
stream_object.write(
double_compressed, filter=[Name.FlateDecode, Name.FlateDecode]
)
assert stream_object.read_bytes() == b'pointless'
assert stream_object.read_raw_bytes() == double_compressed
def test_explicit_decodeparms(self, stream_object):
double_compressed = compress(compress(b'pointless'))
stream_object.write(
double_compressed,
filter=[Name.FlateDecode, Name.FlateDecode],
decode_parms=[None, None],
)
assert stream_object.read_bytes() == b'pointless'
assert stream_object.read_raw_bytes() == double_compressed
def test_no_kwargs(self, stream_object):
with pytest.raises(TypeError):
stream_object.write(compress(b'x'), [Name.FlateDecode])
def test_ccitt(self, stream_object):
ccitt = b'\x00' # Not valid data, just for testing decode_parms
stream_object.write(
ccitt,
filter=Name.CCITTFaxDecode,
decode_parms=Dictionary(K=-1, Columns=8, Length=1),
)
def test_stream_bytes(self, stream_object):
stream_object.write(b'pi')
assert bytes(stream_object) == b'pi'
def test_invalid_filter(self, stream_object):
with pytest.raises(TypeError, match="filter must be"):
stream_object.write(b'foo', filter=[42])
def test_invalid_decodeparms(self, stream_object):
with pytest.raises(TypeError, match="decode_parms must be"):
stream_object.write(
compress(b'foo'), filter=Name.FlateDecode, decode_parms=[42]
)
def test_filter_decodeparms_mismatch(self, stream_object):
with pytest.raises(ValueError, match=r"filter.*and decode_parms"):
stream_object.write(
compress(b'foo'),
filter=[Name.FlateDecode],
decode_parms=[Dictionary(), Dictionary()],
)
def test_copy():
d = Dictionary(
{
'/Boolean': True,
'/Integer': 42,
'/Real': Decimal('42.42'),
'/String': String('hi'),
'/Array': Array([1, 2, 3.14]),
'/Dictionary': Dictionary({'/Color': 'Red'}),
}
)
d2 = copy(d)
assert d2 == d
assert d2 is not d
assert d2['/Dictionary'] == d['/Dictionary']
def test_object_iteration(sandwich):
expected = len(sandwich.objects)
loops = 0
for obj in sandwich.objects:
loops += 1
if isinstance(obj, Dictionary):
assert len(obj.keys()) >= 1
assert expected == loops
def test_object_not_iterable():
    """Scalar objects such as Name do not support iteration."""
    with pytest.raises(TypeError, match="__iter__ not available"):
        iter(pikepdf.Name.A)
@pytest.mark.parametrize(
    'obj', [Array([1]), Dictionary({'/A': 'b'}), Operator('q'), String('s')]
)
def test_object_isinstance(obj):
    """Each wrapper type instance is an instance of its class and of Object."""
    assert isinstance(obj, (Array, Dictionary, Operator, String, Stream))
    assert isinstance(obj, type(obj))
    assert isinstance(obj, Object)
def test_object_classes():
    """All concrete wrapper classes subclass Object."""
    assert all(
        issubclass(cls, Object)
        for cls in (Array, Dictionary, Operator, String, Stream)
    )
class TestOperator:
    """Construction, equality, and conversion behavior of Operator."""

    def test_operator_create(self):
        first_q = Operator('q')
        second_q = Operator('q')
        assert first_q == second_q
        assert first_q != Operator('Q')  # case-sensitive

    def test_operator_str(self):
        assert 'Do' == str(Operator('Do'))

    def test_operator_bytes(self):
        assert b'cm' == bytes(Operator('cm'))
def test_object_mapping(sandwich):
    """page.images behaves like a mapping keyed by resource names."""
    images = sandwich.pages[0].images
    assert '42' not in images
    assert '/R12' in images
    assert '/R12' in images.keys()
def test_replace_object(sandwich):
    """Pdf._replace_object installs a new object at the given objgen."""
    replacement = Dictionary(Type=Name.Dummy)
    target = sandwich.Root.OutputIntents[0].DestOutputProfile.objgen
    sandwich._replace_object(target, replacement)
    assert sandwich.Root.OutputIntents[0].DestOutputProfile == replacement
def test_swap_object(resources):
    """Pdf._swap_objects exchanges the contents of two indirect objects."""
    with Pdf.open(resources / 'fourpages.pdf') as pdf:
        pdf.pages[0].MarkPage0 = True
        first, second = pdf.pages[0].objgen, pdf.pages[1].objgen
        pdf._swap_objects(first, second)
        # the marker moved with the object contents
        assert pdf.pages[1].MarkPage0
        assert Name.MarkPage0 not in pdf.pages[0]
|
pikepdf/pikepdf
|
tests/test_object.py
|
Python
|
mpl-2.0
| 23,196
|
[
"Brian"
] |
7bd2a8ec428f497d6a3620e921819a16669c47819024da0ea7400894f9831ec8
|
from bok_choy.page_object import PageObject, PageLoadError, unguarded
from bok_choy.promise import BrokenPromise, EmptyPromise
from .course_page import CoursePage
from ...tests.helpers import disable_animations
from selenium.webdriver.common.action_chains import ActionChains
class NoteChild(PageObject):
    """
    Base page object scoped to a single note-related DOM element,
    identified by its element id.
    """
    url = None
    BODY_SELECTOR = None

    def __init__(self, browser, item_id):
        super(NoteChild, self).__init__(browser)
        self.item_id = item_id

    def is_browser_on_page(self):
        return self.q(css="{}#{}".format(self.BODY_SELECTOR, self.item_id)).present

    def _bounded_selector(self, selector):
        """
        Return `selector`, but limited to this particular `NoteChild` context
        """
        return "{}#{} {}".format(self.BODY_SELECTOR, self.item_id, selector)

    def _get_element_text(self, selector):
        """Text of the first element matching `selector` within this note, or None."""
        match = self.q(css=self._bounded_selector(selector)).first
        if not match:
            return None
        return match.text[0]
class EdxNotesChapterGroup(NoteChild):
    """
    Helper class that works with chapter (section) grouping of notes in the
    Course Structure view on the Note page.
    """
    BODY_SELECTOR = ".note-group"

    @property
    def title(self):
        return self._get_element_text(".course-title")

    @property
    def subtitles(self):
        return [child.title for child in self.children]

    @property
    def children(self):
        sections = self.q(css=self._bounded_selector('.note-section'))
        return [
            EdxNotesSubsectionGroup(self.browser, el.get_attribute("id"))
            for el in sections
        ]
class EdxNotesGroupMixin(object):
    """
    Helper mixin that works with note groups (used for subsection and tag groupings).
    Host classes must define TITLE_SELECTOR and provide NoteChild's helpers.
    """
    @property
    def title(self):
        return self._get_element_text(self.TITLE_SELECTOR)

    @property
    def children(self):
        items = self.q(css=self._bounded_selector('.note'))
        return [
            EdxNotesPageItem(self.browser, item.get_attribute("id"))
            for item in items
        ]

    @property
    def notes(self):
        return [item.text for item in self.children]
class EdxNotesSubsectionGroup(NoteChild, EdxNotesGroupMixin):
    """
    Helper class that works with subsection grouping of notes in the Course Structure view on the Note page.
    """
    BODY_SELECTOR = ".note-section"  # scopes queries to one subsection group
    TITLE_SELECTOR = ".course-subtitle"  # consumed by EdxNotesGroupMixin.title
class EdxNotesTagsGroup(NoteChild, EdxNotesGroupMixin):
    """
    Helper class that works with tags grouping of notes in the Tags view on the Note page.
    """
    BODY_SELECTOR = ".note-group"
    TITLE_SELECTOR = ".tags-title"
    def scrolled_to_top(self, group_index):
        """
        Returns True if the group with supplied group_index is scrolled near the top of the page
        (expects 10 px padding).
        The group_index must be supplied because JQuery must be used to get this information, and it
        does not have access to the bounded selector.
        """
        # Raw JS lookup of the group title element by its index on the page.
        title_selector = "$('" + self.TITLE_SELECTOR + "')[" + str(group_index) + "]"
        top_script = "return " + title_selector + ".getBoundingClientRect().top;"
        # Wait until the title sits 8-12px from the viewport top (10px padding +/- 2px).
        EmptyPromise(
            lambda: 8 < self.browser.execute_script(top_script) < 12,
            "Expected tag title '{}' to scroll to top, but was at location {}".format(
                self.title, self.browser.execute_script(top_script)
            )
        ).fulfill()
        # Now also verify that focus has moved to this title (for screen readers):
        active_script = "return " + title_selector + " === document.activeElement;"
        return self.browser.execute_script(active_script)
class EdxNotesPageItem(NoteChild):
    """
    Helper class that works with note items on Note page of the course.
    """
    BODY_SELECTOR = ".note"
    UNIT_LINK_SELECTOR = "a.reference-unit-link"
    TAG_SELECTOR = "a.reference-tags"

    def go_to_unit(self, unit_page=None):
        """Follow the note's unit link, optionally waiting for `unit_page` to load."""
        self.q(css=self._bounded_selector(self.UNIT_LINK_SELECTOR)).click()
        if unit_page is not None:
            unit_page.wait_for_page()

    @property
    def unit_name(self):
        return self._get_element_text(self.UNIT_LINK_SELECTOR)

    @property
    def text(self):
        return self._get_element_text(".note-comment-p")

    @property
    def quote(self):
        return self._get_element_text(".note-excerpt")

    @property
    def time_updated(self):
        return self._get_element_text(".reference-updated-date")

    @property
    def tags(self):
        """The tags associated with this note, or None when there are none."""
        tag_links = self.q(css=self._bounded_selector(self.TAG_SELECTOR))
        if len(tag_links) == 0:
            return None
        return [link.text for link in tag_links]

    def go_to_tag(self, tag_name):
        """ Clicks a tag associated with the note to change to the tags view (and scroll to the tag group). """
        links = self.q(css=self._bounded_selector(self.TAG_SELECTOR))
        links.filter(lambda el: tag_name in el.text).click()
class EdxNotesPageView(PageObject):
    """
    Base class for EdxNotes views: Recent Activity, Location in Course, Search Results.
    Subclasses override the selectors/CHILD_CLASS to target their own tab.
    """
    url = None
    BODY_SELECTOR = ".tab-panel"
    TAB_SELECTOR = ".tab"
    CHILD_SELECTOR = ".note"
    CHILD_CLASS = EdxNotesPageItem
    @unguarded
    def visit(self):
        """
        Open the page containing this page object in the browser.
        Unlike the base PageObject.visit, this "visits" by clicking the
        view's tab rather than navigating to a URL (url is None).
        Raises:
            PageLoadError: The page did not load successfully.
        Returns:
            PageObject
        """
        self.q(css=self.TAB_SELECTOR).first.click()
        try:
            return self.wait_for_page()
        except (BrokenPromise):  # parens are grouping, not a tuple: only BrokenPromise is caught
            raise PageLoadError("Timed out waiting to load page '{!r}'".format(self))
    def is_browser_on_page(self):
        # Panel present, tab marked active, and the loading spinner gone.
        return all([
            self.q(css="{}".format(self.BODY_SELECTOR)).present,
            self.q(css="{}.is-active".format(self.TAB_SELECTOR)).present,
            not self.q(css=".ui-loading").visible,
        ])
    @property
    def is_closable(self):
        """
        Indicates if tab is closable or not.
        """
        return self.q(css="{} .action-close".format(self.TAB_SELECTOR)).present
    def close(self):
        """
        Closes the tab.
        """
        self.q(css="{} .action-close".format(self.TAB_SELECTOR)).first.click()
    @property
    def children(self):
        """
        Returns all notes on the page, wrapped as CHILD_CLASS page objects.
        """
        children = self.q(css=self.CHILD_SELECTOR)
        return [self.CHILD_CLASS(self.browser, child.get_attribute("id")) for child in children]
class RecentActivityView(EdxNotesPageView):
    """
    Helper class for Recent Activity view.
    """
    BODY_SELECTOR = "#recent-panel"  # panel shown when this tab is active
    TAB_SELECTOR = ".tab#view-recent-activity"
class CourseStructureView(EdxNotesPageView):
    """
    Helper class for Location in Course view.
    """
    BODY_SELECTOR = "#structure-panel"
    TAB_SELECTOR = ".tab#view-course-structure"
    CHILD_SELECTOR = ".note-group"  # children are chapter groups, not bare notes
    CHILD_CLASS = EdxNotesChapterGroup
class TagsView(EdxNotesPageView):
    """
    Helper class for Tags view.
    """
    BODY_SELECTOR = "#tags-panel"
    TAB_SELECTOR = ".tab#view-tags"
    CHILD_SELECTOR = ".note-group"  # children are tag groups, not bare notes
    CHILD_CLASS = EdxNotesTagsGroup
class SearchResultsView(EdxNotesPageView):
    """
    Helper class for Search Results view.
    """
    BODY_SELECTOR = "#search-results-panel"
    TAB_SELECTOR = ".tab#view-search-results"
class EdxNotesPage(CoursePage):
    """
    EdxNotes page: holds the currently-active view (one of MAPPING) and
    delegates tab switching/closing/searching to it.
    """
    url_path = "edxnotes/"
    # Maps tab names to the view page-object class for each tab.
    MAPPING = {
        "recent": RecentActivityView,
        "structure": CourseStructureView,
        "tags": TagsView,
        "search": SearchResultsView,
    }
    def __init__(self, *args, **kwargs):
        super(EdxNotesPage, self).__init__(*args, **kwargs)
        # "Recent Activity" is the default tab on page load.
        self.current_view = self.MAPPING["recent"](self.browser)
    def is_browser_on_page(self):
        return self.q(css=".wrapper-student-notes .note-group").visible
    def switch_to_tab(self, tab_name):
        """
        Switches to the appropriate tab `tab_name(str)`.
        """
        self.current_view = self.MAPPING[tab_name](self.browser)
        self.current_view.visit()
    def close_tab(self):
        """
        Closes the current view; the UI falls back to Recent Activity.
        """
        self.current_view.close()
        self.current_view = self.MAPPING["recent"](self.browser)
    def search(self, text):
        """
        Runs search with `text(str)` query.
        """
        self.q(css="#search-notes-form #search-notes-input").first.fill(text)
        self.q(css='#search-notes-form .search-notes-submit').first.click()
        # Frontend will automatically switch to Search results tab when search
        # is running, so the view also needs to be changed.
        self.current_view = self.MAPPING["search"](self.browser)
        # A whitespace-only query shows an inline error instead of loading results.
        if text.strip():
            self.current_view.wait_for_page()
    @property
    def tabs(self):
        """
        Returns all tab labels on the page, or None when no tabs exist.
        NOTE(review): relies on Python 2 `map` returning a list -- confirm
        before running under Python 3.
        """
        tabs = self.q(css=".tabs .tab-label")
        if tabs:
            return map(lambda x: x.replace("Current tab\n", ""), tabs.text)
        else:
            return None
    @property
    def is_error_visible(self):
        """
        Indicates whether error message is visible or not.
        """
        return self.q(css=".inline-error").visible
    @property
    def error_text(self):
        """
        Returns error message, or None when no visible error exists.
        """
        element = self.q(css=".inline-error").first
        if element and self.is_error_visible:
            return element.text[0]
        else:
            return None
    @property
    def notes(self):
        """
        Returns all notes on the page.
        """
        children = self.q(css='.note')
        return [EdxNotesPageItem(self.browser, child.get_attribute("id")) for child in children]
    @property
    def chapter_groups(self):
        """
        Returns all chapter groups on the page.
        """
        children = self.q(css='.note-group')
        return [EdxNotesChapterGroup(self.browser, child.get_attribute("id")) for child in children]
    @property
    def subsection_groups(self):
        """
        Returns all subsection groups on the page.
        """
        children = self.q(css='.note-section')
        return [EdxNotesSubsectionGroup(self.browser, child.get_attribute("id")) for child in children]
    @property
    def tag_groups(self):
        """
        Returns all tag groups on the page (same selector as chapter groups;
        the wrapper class determines interpretation).
        """
        children = self.q(css='.note-group')
        return [EdxNotesTagsGroup(self.browser, child.get_attribute("id")) for child in children]
class EdxNotesPageNoContent(CoursePage):
    """
    EdxNotes page -- when no notes have been added.
    """
    url_path = "edxnotes/"

    def is_browser_on_page(self):
        return self.q(css=".wrapper-student-notes .is-empty").visible

    @property
    def no_content_text(self):
        """Return the "no notes yet" placeholder message, or None if absent."""
        placeholder = self.q(css=".is-empty").first
        if not placeholder:
            return None
        return placeholder.text[0]
class EdxNotesUnitPage(CoursePage):
    """
    Page for the Unit with EdxNotes.
    """
    url_path = "courseware/"

    def is_browser_on_page(self):
        return self.q(css="body.courseware .edx-notes-wrapper").present

    def move_mouse_to(self, selector):
        """
        Moves mouse to the element that matches `selector(str)`.
        """
        target = self.q(css=selector)[0]
        ActionChains(self.browser).move_to_element(target).release().perform()
        return self

    def click(self, selector):
        """
        Clicks on the element that matches `selector(str)`.
        """
        self.q(css=selector).first.click()
        return self

    def toggle_visibility(self):
        """
        Clicks on the "Show notes" checkbox.
        """
        self.q(css=".action-toggle-notes").first.click()
        return self

    @property
    def components(self):
        """
        Returns a list of annotatable components.
        """
        wrappers = self.q(css=".edx-notes-wrapper")
        return [
            AnnotatableComponent(self.browser, wrapper.get_attribute("id"))
            for wrapper in wrappers
        ]

    @property
    def notes(self):
        """
        Returns a list of notes for the page.
        """
        collected = []
        for component in self.components:
            collected.extend(component.notes)
        return collected

    def refresh(self):
        """
        Refreshes the page and returns a list of annotatable components.
        """
        self.browser.refresh()
        return self.components
class AnnotatableComponent(NoteChild):
    """
    Helper class that works with annotatable components.
    """
    BODY_SELECTOR = ".edx-notes-wrapper"
    @property
    def notes(self):
        """
        Returns a list of notes (highlights) for the component.
        """
        notes = self.q(css=self._bounded_selector(".annotator-hl"))
        return [EdxNoteHighlight(self.browser, note, self.item_id) for note in notes]
    def create_note(self, selector=".annotate-id"):
        """
        Create the note by the selector; this is a generator that yields each
        note between opening the editor and saving it, so the caller can fill
        in text/tags before save() runs.
        """
        for element in self.q(css=self._bounded_selector(selector)):
            note = EdxNoteHighlight(self.browser, element, self.item_id)
            note.select_and_click_adder()
            yield note
            # Runs after the caller resumes the generator (for-loop body done).
            note.save()
    def edit_note(self, selector=".annotator-hl"):
        """
        Edit the note by the selector; generator with the same
        yield-then-save contract as create_note.
        """
        for element in self.q(css=self._bounded_selector(selector)):
            note = EdxNoteHighlight(self.browser, element, self.item_id)
            note.show().edit()
            yield note
            note.save()
    def remove_note(self, selector=".annotator-hl"):
        """
        Removes the note by the selector.
        """
        for element in self.q(css=self._bounded_selector(selector)):
            note = EdxNoteHighlight(self.browser, element, self.item_id)
            note.show().remove()
class EdxNoteHighlight(NoteChild):
    """
    Helper class that works with a single highlighted note (annotator.js UI).
    BODY_SELECTOR is empty, so _bounded_selector scopes queries to
    "#<parent component id> <selector>".
    """
    BODY_SELECTOR = ""
    ADDER_SELECTOR = ".annotator-adder"
    VIEWER_SELECTOR = ".annotator-viewer"
    EDITOR_SELECTOR = ".annotator-editor"
    def __init__(self, browser, element, parent_id):
        super(EdxNoteHighlight, self).__init__(browser, parent_id)
        self.element = element  # the selenium element for the highlighted text
        self.item_id = parent_id
        disable_animations(self)
    @property
    def is_visible(self):
        """
        Returns True if the note is visible (either viewer or editor popup).
        """
        viewer_is_visible = self.q(css=self._bounded_selector(self.VIEWER_SELECTOR)).visible
        editor_is_visible = self.q(css=self._bounded_selector(self.EDITOR_SELECTOR)).visible
        return viewer_is_visible or editor_is_visible
    def wait_for_adder_visibility(self):
        """
        Waiting for visibility of note adder button.
        """
        self.wait_for_element_visibility(
            self._bounded_selector(self.ADDER_SELECTOR), "Adder is visible."
        )
    def wait_for_viewer_visibility(self):
        """
        Waiting for visibility of note viewer.
        """
        self.wait_for_element_visibility(
            self._bounded_selector(self.VIEWER_SELECTOR), "Note Viewer is visible."
        )
    def wait_for_editor_visibility(self):
        """
        Waiting for visibility of note editor.
        """
        self.wait_for_element_visibility(
            self._bounded_selector(self.EDITOR_SELECTOR), "Note Editor is visible."
        )
    def wait_for_notes_invisibility(self, text="Notes are hidden"):
        """
        Waiting for invisibility of all notes.
        """
        selector = self._bounded_selector(".annotator-outer")
        self.wait_for_element_invisibility(selector, text)
    def select_and_click_adder(self):
        """
        Creates selection for the element and clicks `add note` button.
        Leaves the editor open; call save() or cancel() afterwards.
        """
        ActionChains(self.browser).double_click(self.element).release().perform()
        self.wait_for_adder_visibility()
        self.q(css=self._bounded_selector(self.ADDER_SELECTOR)).first.click()
        self.wait_for_editor_visibility()
        return self
    def click_on_highlight(self):
        """
        Clicks on the highlighted text.
        """
        ActionChains(self.browser).move_to_element(self.element).click().release().perform()
        return self
    def click_on_viewer(self):
        """
        Clicks on the note viewer.
        """
        self.q(css=self._bounded_selector(self.VIEWER_SELECTOR)).first.click()
        return self
    def show(self):
        """
        Hover over highlighted text -> shows note.
        """
        ActionChains(self.browser).move_to_element(self.element).release().perform()
        self.wait_for_viewer_visibility()
        return self
    def cancel(self):
        """
        Clicks cancel button.
        """
        self.q(css=self._bounded_selector(".annotator-cancel")).first.click()
        self.wait_for_notes_invisibility("Note is canceled.")
        return self
    def save(self):
        """
        Clicks save button and waits for the AJAX save to finish.
        """
        self.q(css=self._bounded_selector(".annotator-save")).first.click()
        self.wait_for_notes_invisibility("Note is saved.")
        self.wait_for_ajax()
        return self
    def remove(self):
        """
        Clicks delete button and waits for the AJAX delete to finish.
        """
        self.q(css=self._bounded_selector(".annotator-delete")).first.click()
        self.wait_for_notes_invisibility("Note is removed.")
        self.wait_for_ajax()
        return self
    def edit(self):
        """
        Clicks edit button.
        """
        self.q(css=self._bounded_selector(".annotator-edit")).first.click()
        self.wait_for_editor_visibility()
        return self
    @property
    def text(self):
        """
        Returns text of the note (opens the viewer, reads, then dismisses it
        by clicking the page body).
        """
        self.show()
        element = self.q(css=self._bounded_selector(".annotator-annotation > div.annotator-note"))
        if element:
            text = element.text[0].strip()
        else:
            text = None
        self.q(css=("body")).first.click()
        self.wait_for_notes_invisibility()
        return text
    @text.setter
    def text(self, value):
        """
        Sets text for the note (editor must already be open).
        """
        self.q(css=self._bounded_selector(".annotator-item textarea")).first.fill(value)
    @property
    def tags(self):
        """
        Returns the tags associated with the note.
        Tags are returned as a list of strings, with each tag as an individual string.
        """
        tag_text = []
        self.show()
        tags = self.q(css=self._bounded_selector(".annotator-annotation > div.annotator-tags > span.annotator-tag"))
        if tags:
            for tag in tags:
                tag_text.append(tag.text)
        self.q(css="body").first.click()
        self.wait_for_notes_invisibility()
        return tag_text
    @tags.setter
    def tags(self, tags):
        """
        Sets tags for the note. Tags should be supplied as a list of strings, with each tag as an individual string.
        """
        self.q(css=self._bounded_selector(".annotator-item input")).first.fill(" ".join(tags))
    def has_sr_label(self, sr_index, field_index, expected_text):
        """
        Returns true iff a screen reader label (of index sr_index) exists for the annotator field with
        the specified field_index and text.
        """
        label_exists = False
        # Wait until enough sr labels are rendered before indexing into them.
        EmptyPromise(
            lambda: len(self.q(css=self._bounded_selector("li.annotator-item > label.sr"))) > sr_index,
            "Expected more than '{}' sr labels".format(sr_index)
        ).fulfill()
        annotator_field_label = self.q(css=self._bounded_selector("li.annotator-item > label.sr"))[sr_index]
        for_attrib_correct = annotator_field_label.get_attribute("for") == "annotator-field-" + str(field_index)
        if for_attrib_correct and (annotator_field_label.text == expected_text):
            label_exists = True
        # Dismiss the popup before returning so the page is left in a clean state.
        self.q(css="body").first.click()
        self.wait_for_notes_invisibility()
        return label_exists
|
B-MOOC/edx-platform
|
common/test/acceptance/pages/lms/edxnotes.py
|
Python
|
agpl-3.0
| 20,547
|
[
"VisIt"
] |
baf88cc29c24a5227913383de3697c408f2055769836a17a3d5ee2632fe1c923
|
import json
import os
from International import Languages
class Keys:
    # Names of the keys used in the JSON settings file; shared by
    # load_settings and save_settings so callers never hard-code strings.
    last_idf_folder = 'last_idf_folder'
    last_epw_folder = 'last_epw_folder'
    last_idf = 'last_idf'
    last_epw = 'last_epw'
    language = 'language'
def load_settings(settings_file_name):
    """Read settings from the given JSON file, filling in defaults for missing keys.

    A missing or unreadable file yields a dict of all defaults (best effort,
    never raises). Returns the settings dict.
    """
    try:
        # Use a context manager so the file handle is closed promptly
        # (the original json.load(open(...)) leaked the handle).
        with open(settings_file_name) as settings_file:
            settings = json.load(settings_file)
    except Exception:
        # Deliberately broad: any parse/IO problem falls back to defaults.
        settings = {}
    settings.setdefault(Keys.last_idf_folder, os.path.expanduser("~"))
    settings.setdefault(Keys.last_epw_folder, os.path.expanduser("~"))
    settings.setdefault(Keys.last_idf, '/path/to/idf')
    settings.setdefault(Keys.last_epw, '/path/to/epw')
    # Keep the Languages lookup lazy (only when the key is absent), matching
    # the original control flow.
    if Keys.language not in settings:
        settings[Keys.language] = Languages.English
    return settings
def save_settings(settings, settings_file_name):
    """Write `settings` to `settings_file_name` as JSON.

    Failures (e.g. unwritable path) are deliberately ignored: losing the
    saved preferences is preferable to crashing on exit.
    """
    try:
        # Context manager guarantees the handle is flushed and closed
        # (the original open(...) without close could leak/buffer).
        with open(settings_file_name, 'w') as settings_file:
            json.dump(settings, settings_file)
    except Exception:
        pass
|
Myoldmopar/EPLaunchLight
|
EPLaunchLite/Settings.py
|
Python
|
bsd-3-clause
| 1,038
|
[
"EPW"
] |
3bdc051035044e4b66cd923319710eef9c87f66b593040315d21aea41a39eafe
|
'''
This module contains database routines that may be accessed with or without using a web framework. All use SQLObject; this
adds a bit of overhead, but provides nice exception management.
The connection object is set locally based on parameters in the projectSpecs import. We're using MySQL. The database
schema is stored in a file called (cleverly enough) database schema.
Methods available:
ManageURLs:
addUrlsFromList(self, urls, deleteMe = False, searchId = None, source = 'test', depth = 0, order = None)
- add a list of urls, optionally recording the order in which they appear; adds to url, url_search tables
deleteFlaggedUrls()
- delete any rows with the deleteMe flag set to true
addCatchFromUrlVisit
- adds content, links, and tags from visited URL (assumes we know the searchid and urlid)
TODO: optionally allow user to discard non-conforming content from the existing search database when changing search criteria
TODO: search the database to add conforming content from other searches to current search
'''
#standard modules
from datetime import datetime
import logging
import random
import unittest
#third-party modules
from sqlobject import mysql, SQLObject, StringCol, dberrors, SQLObjectNotFound #change the import to use dbs other than MySQL
#custom modules
import test
import projectSpecs as projectSpecs
import findDate as dateFinder
#instantiate imported classes
ancestorDepth = 20 #layers of nested, hierarchal url parse tree to use searching for dates
findObj = dateFinder.FindDateInSoup(ancestorDepth)
#log settings
LOG_NAME = "master.log"
LOG_LEVEL = logging.DEBUG
#return codes
RETURN_SUCCESS = projectSpecs.RETURN_SUCCESS
RETURN_FAIL = projectSpecs.RETURN_FAIL
#database parameters and connection object using specs from an external file; the connection object is global to this module
dbUser = projectSpecs.dbUser
dbPass = projectSpecs.dbPass
dbSchema = projectSpecs.dbSchema
dbHost = projectSpecs.dbHost
tableName_Url = projectSpecs.tableName_Url
tableName_Url_Tags = projectSpecs.tableName_Url_Tags
tableName_Url_Search = projectSpecs.tableName_Url_Search
tableName_Content = projectSpecs.tableName_Content
tableName_Content_Search = projectSpecs.tableName_Content_Search
tableName_Content_Score = projectSpecs.tableName_Content_Score
tableName_Search = projectSpecs.tableName_Search
tableName_Metasearch_Search = projectSpecs.tableName_Metasearch_Search
tableName_Url_Html = projectSpecs.tableName_Url_Html
tableName_Search_Viewcriteria = projectSpecs.tableName_Search_Viewcriteria
# BUG FIX: the seven assignments below previously all rebound
# tableName_BadUrlFragment, which left the names the table classes actually
# reference (tableName_Negationwords, tableName_Poswords, ...) undefined and
# raised NameError at class-creation time.
# NOTE(review): BadUrlFragment keeps the Negationwords table name it had in
# the first of the original copy-pasted lines -- confirm the intended table.
tableName_BadUrlFragment = projectSpecs.tableName_Negationwords
tableName_Negationwords = projectSpecs.tableName_Negationwords
tableName_Poswords = projectSpecs.tableName_Poswords
tableName_Negwords = projectSpecs.tableName_Negwords
tableName_Obscenewords = projectSpecs.tableName_Obscenewords
tableName_Scoremethods = projectSpecs.tableName_Scoremethods
tableName_Scores = projectSpecs.tableName_Scores
tableName_Wordcount = projectSpecs.tableName_Wordcount
conn = mysql.builder()(user=dbUser, password=dbPass, host=dbHost, db=dbSchema, use_unicode=True, sqlobject_encoding='utf8')
a = 1  # unused; retained in case another module imports it
###Database Objects###
class Url(SQLObject):
    # ORM row for the url table: one row per discovered url (see addSingleUrl).
    _connection = conn #set the connection object
    class sqlmeta:
        fromDatabase = True #column definitions are introspected from the live db table
        table = tableName_Url #this allows the table to be called something besides the class name
class UrlSearch(SQLObject):
    # Relates a url to a search: records search id, source, depth, and order.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Url_Search
class Search(SQLObject):
    # One row per search definition (presumably search terms/criteria -- the
    # schema is introspected from the db, not declared here).
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Search
class BadUrlFragment(SQLObject):
    # Known-bad url fragments ('.pdf', unwanted site names, ...) consulted by
    # DatabaseMethods.goodUrl via the badString column.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_BadUrlFragment
class UrlTags(SQLObject):
    # Meta-tag name/value pairs harvested from a visited url
    # (see addCatchFromUrlVisit).
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Url_Tags
class UrlHtml(SQLObject):
    # Raw html stored per url (html column; see getUrlsForSearchWithGoodHtml).
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Url_Html
class Content(SQLObject):
    '''Represents a piece of harvested content. A couple of notes here: the db
    field 'content' is specified as a text type (56k). However, MySQL won't
    index anything bigger than a VARCHAR(256), so we can't make content a
    unique key. To get around this, there is another column called 'shortCont'
    holding the first 50 characters of content. Ugly but effective.
    '''
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Content
class ContentSearch(SQLObject):
    # Join table linking a Content row to the search and url it came from
    # (the original comment here was copy-pasted from another class).
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Content_Search
class SearchViewcriteria(SQLObject):
    # Per-search view/parse criteria (see getParseCriteriaForSearch); the
    # original comment here was copy-pasted from another class.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Search_Viewcriteria
class NegationWords(SQLObject):
    # Negation words e.g. "not", "nor" (used by the scoring pipeline).
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Negationwords
class PosWords(SQLObject):
    # Positive sentiment words.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Poswords
class NegWords(SQLObject):
    # Negative sentiment words.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Negwords
class ObsceneWords(SQLObject):
    # List of obscene words.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Obscenewords
class ScoreMethods(SQLObject):
    # Available methods for scoring content.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Scoremethods
class Scores(SQLObject):
    # Score for each bit of content, for each scoring method.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Scores
class WordCount(SQLObject):
    # Counts of positive, negative, and obscene words per content row.
    _connection = conn
    class sqlmeta:
        fromDatabase = True
        table = tableName_Wordcount
###Error Class(es)
class DbError(Exception):
    """Module-level exception wrapping database failures with a message."""

    def __init__(self, value):
        self.parameter = value

    def __str__(self):
        return repr(self.parameter)
###Management Classes
class DatabaseMethods():
    '''Data-access helpers for urls, searches, and harvested content.

    This class adds urls from a list. If doOrder is true, we'll add an "order"
    parameter to the table to indicate the search engine's sequence of sites
    for this search (which we may use as an indicator of authority).
    '''
    def __init__(self):
        # Stateless: all state lives in the module-level connection object.
        pass
def gatherSearchSpecs(self, searchid= None):
#pulls search specifications from db
if searchid:
try: return Search.selectBy(id=searchid)
except: raise DbError("Search requested: %s can't be found"%str(searchid))
def gatherSearchIds(self, urlid = None):
try: return UrlSearch.selectBy(urlid = urlid)
except: raise DbError("Lookup in urlSearch Failed for %i"%urlid)
    def addSingleUrl(self, url, deleteMe = False, searchId = None, source = 'unknown', depth = 0, order = None, urlIx = 0):
        # Adds a URL, updating the url and url_search tables; returns the new
        # url id, or None when the url is a duplicate (see note below).
        urlobj = None
        try:
            urlobj = Url(url = url, addDate = datetime.now(), deleteMe = deleteMe)
        except dberrors.DuplicateEntryError:
            # duplicate urls are logged and skipped, not treated as errors
            logging.info("Failed to add duplicate url %s"%url)
        except Exception, e:
            logging.info("addUrls encountered exception: %s while adding url %s"%(e, url))
            raise DbError("error inserting url %s"%url)
        # If we added the url successfully, also update the url_search table
        # ('order' is optional - intended to record the search engine's idea of
        # relevance/authority).
        # NOTE(review): on a duplicate, urlobj stays None so url_search is NOT
        # updated and the method returns None, despite the original comment
        # claiming duplicates are handled -- confirm intended behavior.
        if urlobj:
            try:
                urlSearchObj = UrlSearch(urlid = urlobj.id, source = source, depth = depth, searchid = searchId, urlorder = order)
                return urlobj.id
            except Exception, e:
                logging.info("addUrls encountered exception: %s while adding url %s to the urlSearch table"%(e, url))
                raise DbError("error inserting url %s: %s" %(url,e))
def goodUrl(self, url):
#Screens out obviously bad urls by name using a list stored in the BadUrlFragment table. Also insures that they at least look
# legit http*. The table contains strings like .pdf, .doc, .xls to weed out documents, along with names of sites we know we don't want.
badFrags = BadUrlFragment.select()
badCount = badFrags.count()
if badCount == 0: #if we don't have any know bad url bits in our db, evaluate the url to be good
return True
if url[:4].lower() <> 'http':
return False
good = True; ix = 0; done = False
while not done:
frag = badFrags[ix].badString
if url.find(frag) > 0:
return False
ix +=1
if ix >= badCount: done = True
return True
def addUrlsFromList(self, urls, deleteMe = False, searchId = None, source = None, depth = 0, order = None):
#add urls from a vetted list to the database
if not isinstance(urls, list): #makes sure a single entry is treated as a short list
urls = [urls]
urlIx = 0
for url in urls:
urlIx=+1
if self.goodUrl(url): #screen out urls based on name
try:
ret = self.addSingleUrl(url = url, deleteMe = deleteMe, searchId = searchId, source = source, depth = depth, order =urlIx)
except Exception, e:
logging.info("addUrls encountered exception: %s while adding url %s to the urlSearch table"%(e, url))
raise DbError("error inserting url %s"%url)
return RETURN_SUCCESS
    def deleteFlaggedUrls(self):
        # Clean up the database by deleting url rows with the deleteMe flag set.
        # NOTE(review): only the url table is touched here; the original comment
        # claims url_search is also affected -- presumably via a db-level
        # cascade. Confirm against the schema.
        try:
            Url.deleteBy(deleteMe =True)
            return RETURN_SUCCESS
        except Exception, e:
            logging.info("Failed to delete flagged urls. %s"%e)
            raise
    def deleteFlaggedSearches(self):
        # Clean up the database by deleting search rows with the deleteMe flag set.
        try:
            Search.deleteBy(deleteMe =True)
            return RETURN_SUCCESS
        except Exception, e:
            logging.info("Failed to delete flagged searches. %s"%e)
            raise
def deleteUrlsForSearch(self, searchid):
for s in UrlSearch.selectBy(searchid = searchid):
Url.delete(s.urlid)
    def addCatchFromUrlVisit(self, urlid, searchid, catchDict, urlAddDepth = 2, deleteMe = False, criteriaid=None):
        '''Parse the dict captured from a url visit and persist it.

        `catchDict` carries 'contentAsSoupObjects', 'polishedCont', 'metaData',
        and 'links'. Content rows are inserted (duplicates silently skipped),
        the url's meta tags are replaced, and newly-harvested links are added
        one level deeper -- capped by `urlAddDepth`. Returns RETURN_SUCCESS.
        '''
        # Update both the content and contentSearch tables.
        contentSoup = catchDict['contentAsSoupObjects']
        contentUnicode = catchDict['polishedCont']
        for s, u in zip(contentSoup,contentUnicode ):
            try:
                # shortCont (first 50 chars) exists because MySQL can't put a
                # unique index on the full text column (see Content class).
                shortCont = u[:50]
                contObj = Content(content = u, shortCont = shortCont, dateAcquired = datetime.now(), criteriaid = criteriaid)
                csObj = ContentSearch(contentid = contObj.id, searchid = searchid, urlid = urlid)
                # Date extraction is currently disabled (calls kept for reference).
                date = None
                #logging.info( "trying to find a date in %s"%s)
                #date = findObj.findDate_main(s)
                if date:
                    contObj.datePosted = date
                    logging.info("found date: %s for content %s" %(str(date), shortCont))
                else:
                    logging.info( "no date for content %s"%str(shortCont))
            except dberrors.DuplicateEntryError:
                # we already have this content bit; skip silently
                pass
            except Exception, e:
                msg = "couldn't add content %s for searchid %s and urlid %s"%(u,str(searchid), str(urlid))
                logging.info("%s: %s" %(msg, e))
                raise
        # Delete existing meta tags for this url, then insert today's info
        # (name and value are truncated to the 20-char columns).
        try:
            UrlTags.deleteBy(urlid = urlid)
            metaData = catchDict['metaData']
            for m in metaData:
                name, value = m
                urlTagObj = UrlTags(urlid =urlid, name = name[:20], value = value[:20])
        except:
            msg = "couldn't add tags %s for searchid %s and urlid %s"%(value,str(searchid), str(urlid))
            logging.info(msg)
            raise
        # Add newly-harvested links with depth one more than the initiating
        # url's. urlAddDepth caps the recursion: urls from google/upload have
        # depth=0, urls captured from those have depth=1, and so on.
        urlSearchObj = UrlSearch.selectBy(urlid = urlid, searchid = searchid)
        depth = urlSearchObj[0].depth + 1
        if depth <= urlAddDepth:
            links = catchDict['links']
            try:
                for link in links:
                    self.addSingleUrl(url = link, deleteMe = deleteMe, searchId = searchid, source = str(urlid), depth = depth)
            except dberrors.DuplicateEntryError:
                pass
            except:
                logging.info("couldn't add new url %s"%link)
                raise
        return RETURN_SUCCESS
#"Getters" to return results of queries; all return iterable objects
def getUrl(self, urlid):
#returns a url db object
try: return Url.get(urlid)
except: raise DbError("Url %s doesn't exist in url table"%str(urlid))
def getAllUrls(self):
try: return Url.select()
except: raise DbError("getAllUrls failed")
def getAllSearches(self):
    """Return an iterable over every row of the search table."""
    try:
        return Search.select()
    except:
        raise DbError("getAllSearches failed")
def getContentForSearch(self, searchid, limit=None):
    """Return the Content rows linked to ``searchid`` via ContentSearch.

    ``limit`` (optional) caps the number of link rows examined.
    """
    query = ContentSearch.selectBy(searchid=searchid)
    if limit:
        query = query.limit(limit)
    return [Content.get(link.contentid) for link in query]
def getUrlsForSearch(self, searchid, limit=None):
    """Return the Url rows linked to ``searchid`` via the UrlSearch table.

    ``limit`` (optional) caps the number of link rows examined.
    Raises DbError with a specific message when a UrlSearch row points at
    a missing Url (referential-integrity breach), or a generic DbError on
    any other failure.
    """
    urls = []
    try:
        if limit:
            urlsearch = UrlSearch.selectBy(searchid=searchid).limit(limit)
        else:
            urlsearch = UrlSearch.selectBy(searchid=searchid)
        for u in urlsearch:
            try:
                urls.append(Url.get(u.urlid))
            except SQLObjectNotFound:
                # Dangling link row: report exactly which url id is missing.
                raise DbError("The UrlSearch table has entry for url %i, which doesn't exist" % u.urlid)
        return urls
    except DbError:
        # Bug fix: previously the informative integrity DbError raised above
        # was caught by the generic handler below and replaced with a vague
        # "getUrlsForSearch failed" message. Let it propagate untouched.
        raise
    except Exception:
        raise DbError("getUrlsForSearch failed")
def cleanUpOrphanedContent(self):
    """Delete Content rows that no ContentSearch link row references."""
    for record in Content.select():
        linked = ContentSearch.selectBy(contentid=record.id).count()
        if linked == 0:
            Content.delete(record.id)
def getUrlsForSearchWithGoodHtml(self, searchid):
    """For ``searchid``, return the UrlHtml rows whose stored html is
    non-trivial (longer than 10 characters)."""
    harvested = []
    for url in self.getUrlsForSearch(searchid):
        for record in UrlHtml.selectBy(urlid=url.id):
            if len(record.html) > 10:
                harvested.append(record)
    return harvested
def getHtmlForUrl(self, urlid):
    """Return the UrlHtml rows stored for ``urlid``."""
    try:
        return UrlHtml.selectBy(urlid=urlid)
    except:
        raise DbError("getHtmlForUrl failed")
def getSearchesforUrl(self, urlid):
    """Return the UrlSearch link rows tying ``urlid`` to its searches."""
    try:
        return UrlSearch.selectBy(urlid=urlid)
    except:
        raise DbError("UrlSearch failed")
def getParseCriteriaForSearch(self, searchid):
    ''''Find the search criterion associated with a search (there's one per search);
    return a dict of searchid, object (since content is stored by search we need to keep these together)
    '''
    # Look up the view-criteria row for this search.
    criteria = SearchViewcriteria.selectBy(searchid = searchid)
    if criteria.count() == 0:
        # No criteria row yet: lazily create one from the Search row's own
        # include/exclude fields, then re-query.
        try:
            s = Search.get(searchid)
            SearchViewcriteria(searchid= searchid, exclude = s.exclude, include = s.include)
            criteria = SearchViewcriteria.selectBy(searchid = searchid)
        except SQLObjectNotFound:
            # The Search row itself is missing: log and fall through; the
            # final query below will simply return an empty result set.
            msg = "getParseCriteriaForSearch couldn't add a SearchViewcriteria object for search %i" %searchid
            logging.debug(msg)
        except:
            raise DbError("getParseCriteriaForSearch couldn't add a SearchViewcriteria object for search %i" %searchid)
    # NOTE(review): the table is re-queried here rather than reusing
    # `criteria`; presumably to pick up a freshly inserted row -- confirm.
    try: return {'searchid': searchid, 'criteria': SearchViewcriteria.selectBy(searchid = searchid)}
    except: raise DbError("SearchesCriteria failed")
def getParseCriteriaForUrl(self, urlid):
    """Return the parse criteria for every search linked to ``urlid``.

    Each element is the ``{'searchid': ..., 'criteria': ...}`` dict
    produced by ``getParseCriteriaForSearch`` (a url may belong to many
    searches, so a list is returned).
    """
    try:
        links = self.getSearchesforUrl(urlid)
        return [self.getParseCriteriaForSearch(link.searchid) for link in links]
    except:
        raise DbError("SearchesCriteria failed")
def getHtmlForSearch(self, searchid):
    """Return the first UrlHtml row for each url tied to ``searchid``.

    Urls with no harvested html yet are silently skipped (best-effort).
    """
    collected = []
    try:
        for urlObj in self.getUrlsForSearch(searchid):
            try:
                collected.append(UrlHtml.selectBy(urlid=urlObj.id)[0])
            except:
                # No html captured for this url yet -- skip it.
                pass
    except:
        raise DbError("Can't find html object")
    return collected
def deleteRawHtmlNoParse(self):
    """Blank out the stored html of every UrlHtml row (rows are kept)."""
    table = self.getUrlHtmlObj()
    for record in table.select():
        record.html = ''
def haveWordCountForContent(self, contentid):
    """Return True if the WordCount table already has a row for ``contentid``.

    Raises DbError on any lookup failure.
    """
    try:
        # count() > 0 expresses the existence test directly; the old code
        # kept an unused accumulator and an if/else around the boolean.
        return WordCount.selectBy(contentid = contentid).count() > 0
    except Exception:
        raise DbError("error looking up word count for %i"%contentid)
def getWordCountFor(self, contentid):
    """Return the WordCount rows (pos/neg/obscene counts) for ``contentid``."""
    try:
        return WordCount.selectBy(contentid=contentid)
    except:
        logging.info("Failed to get word count for content: %i"%contentid)
        raise
def updateWordCount(self, contentid, posWords, negWords, obsWords):
    """Insert or update the WordCount row for a content item.

    ``contentid`` is really a Content row object (the log line below reads
    ``contentid.id``).  Failures are logged and swallowed deliberately so a
    single bad row never aborts a batch run.
    """
    try:
        dbRow = WordCount.selectBy(contentid = contentid)
        if dbRow.count() == 0:
            # No row yet: insert a fresh one.
            WordCount(pos = posWords, neg = negWords, obscene = obsWords, contentid = contentid)
        else:
            dbRow[0].pos = posWords
            dbRow[0].neg = negWords
            # Bug fix: the column is named ``obscene`` (see the insert branch
            # above); the old code assigned ``dbRow[0].obs``, which only set a
            # transient Python attribute and never updated the database.
            dbRow[0].obscene = obsWords
    except Exception as e:
        logging.info("Failed to update word count for contentid %i %s" %(int(contentid.id), e))
def getScoreMethods(self):
    """Return all rows of the ScoreMethods table."""
    try:
        return ScoreMethods.select()
    except:
        logging.info("Failed to find ScoreMethods object")
        raise
def getScoreMethodFor(self, methodid):
    """Return the ScoreMethods row(s) whose id equals ``methodid``."""
    try:
        return ScoreMethods.selectBy(id=methodid)
    except:
        logging.info("Failed to find ScoreMethods object")
        raise
def getScoresObject(self):
    """Return all rows of the Scores table."""
    try:
        return Scores.select()
    except:
        logging.info("Failed to find Scores object")
        raise
def getScoresObjectForContentMethod(self, contentid, methodid):
    """Return the Scores rows matching ``contentid`` and ``methodid``.

    Logs and re-raises any lookup failure.
    """
    try:
        return Scores.selectBy(contentid = contentid, methodid = methodid)
    except:
        logging.info("Failed to find Scores object")
        raise
    # Bug fix: the original ended with an unreachable ``return None`` after
    # the re-raise; it has been removed.
def setScoreForContentMethod(self, score, contentid, methodid, overwrite):
    '''sets scores for a content id and scoring method (if there's already a record and overwrite = true)
    adds new score if there isn't one
    '''
    scoreObj = self.getScoresObjectForContentMethod(contentid, methodid)
    scoreMethodsObj = self.getScoreMethodFor(methodid = methodid)
    if scoreObj.count() == 0:  # new entry
        # NOTE(review): ``scoreMethodsObj`` is a query result set, not a
        # ScoreMethods row; confirm SQLObject accepts it for ``methodid``
        # (passing ``methodid`` directly may be the real intent).
        Scores(score = score, contentid = contentid, methodid = scoreMethodsObj)
    else:
        if overwrite:  # update the existing entry only when asked to
            # Bug fix: ``scoreObj`` is a SelectResults; the old code assigned
            # ``scoreObj.score``, which only set an attribute on the result
            # object and never updated the row. Update the row itself.
            scoreObj[0].score = score
def generateWordListFrom(self, name):
    """Return the words stored in one of the word-list tables.

    ``name`` selects the table: 'pos', 'neg' or 'obs'. Any other value
    leaves the row set unbound and raises NameError (original behavior).
    """
    if name == 'pos':
        rows = PosWords.select()
    if name == 'neg':
        rows = NegWords.select()
    if name == 'obs':
        rows = ObsceneWords.select()
    return [row.word for row in rows]
### This might be overkill, but these simply return database objects to other modules
def getUrlHtmlObj(self):
    """Return the UrlHtml table class itself (convenience accessor)."""
    try:
        return UrlHtml
    except:
        raise DbError("Can't find UrlHtml object")
def getSearchObj(self):
    """Return the Search table class itself (convenience accessor)."""
    try:
        return Search
    except:
        raise DbError("Can't find Search object")
def getContentObj(self):
    """Return the Content table class itself (convenience accessor)."""
    try:
        return Content
    except:
        raise DbError("Can't find getContentObj object")
def getUrlSearchObj(self):
    """Return the UrlSearch table class itself (convenience accessor)."""
    try:
        return UrlSearch
    except:
        raise DbError("Can't find UrlSearch object")
def getSearchViewCriteriaObj(self):
    """Return the SearchViewcriteria table class itself (convenience accessor)."""
    try:
        return SearchViewcriteria
    except:
        raise DbError("Can't find SearchViewCriteria object")
def getUrlSearchObj(self):
    # NOTE(review): duplicate definition -- an identical getUrlSearchObj is
    # defined a few lines above; this later one silently wins at class
    # creation time. One of the two should be deleted.
    try: return UrlSearch
    except: raise DbError("Can't find UrlSearch object")
def _set_logger(self):
    """Configure module-wide logging to ./logs/<LOG_NAME>.

    Uses the module-level LOG_LEVEL / LOG_NAME settings; the log file is
    truncated and rewritten (filemode='w') on every run.
    """
    log_dir = os.path.join(os.path.dirname(__file__), 'logs').replace('\\', '/')
    logging.basicConfig(
        level=LOG_LEVEL,
        format='%(module)s %(funcName)s %(lineno)d %(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        filename=log_dir + '/' + LOG_NAME,
        filemode='w')
class testDbRoutines(unittest.TestCase):
    # Ad-hoc test harness for DatabaseMethods.
    # NOTE(review): __init__ overrides unittest.TestCase.__init__ without the
    # methodName argument and never calls the superclass initializer, so this
    # class cannot be run through normal unittest discovery; the __main__
    # block below drives it by hand instead.
    def __init__(self):
        #some randomized urls (keeps unique constraint from interfering)
        self._urls = ["xyz.html"+ str(random.random()), "http://www.dogma.com"+ str(random.random()),
            "http://www.dogma.com/test/"+ str(random.random()), "http://www.dogma.com/test?id=123"+ str(random.random())]
        self._urls1 = ["xyz.html"+ str(random.random()), "http://www.dogma.com"+ str(random.random()),
            "http://www.dogma.com/test/"+ str(random.random()), "http://www.dogma.com/test?id=123"+ str(random.random())]
    def testAddUrlsFromList(self):
        #add a bunch of urls from a list
        clsObj = DatabaseMethods()
        ret = clsObj.addUrlsFromList(self._urls, deleteMe = True, searchId = 0, source = 'test')
        # NOTE(review): identity (`is`) comparison works only if
        # RETURN_SUCCESS is a small-int/singleton constant -- confirm.
        assert ret is RETURN_SUCCESS
    def testDeleteUrls(self):
        #delete rows with deleteMe flag set
        clsObj = DatabaseMethods()
        ret = clsObj.deleteFlaggedUrls()
        assert ret is RETURN_SUCCESS
    def testAddCatchFromUrlVisit(self):
        # Build placeholder search/url rows, then feed a fake "catch" dict
        # through addCatchFromUrlVisit and check it reports success.
        clsObj = DatabaseMethods()
        searchObj = clsObj.getSearchObj()
        urlSearchObj = clsObj.getUrlSearchObj()
        #add placeholder search, url and urlSearch database entries
        fakeSearch = searchObj(include = "michigan, football", exclude = "osu", deleteMe = True)
        fakeUrlId = clsObj.addSingleUrl(url = "http://fake"+str(random.random()), deleteMe = True, searchId = fakeSearch.id, source = 'test')
        urlSearchObj(urlid = fakeUrlId, searchid = fakeSearch.id)
        #see if we can add a fake return from a url to the content table
        fakeCont = ['and a one'+str(random.random()), 'and a two'+str(random.random()), 'and a three'+str(random.random())]
        fakeDates= [datetime.now(), datetime.now(), datetime.now()]
        fakeLinks= ['http://www.dogma.com'+str(random.random()), 'http://www.karma.com'+str(random.random()), 'http://www.fang.com'+str(random.random())]
        fakeMeta = [(u'http-equiv', u'Content-Type'), (u'content', u'text/html; charset=%SOUP-ENCODING%')]
        fakeDict = {'polishedCont': fakeCont, 'dates':fakeDates, 'links': fakeLinks, 'metaData':fakeMeta,'contentAsSoupObjects': fakeCont}
        ret = clsObj.addCatchFromUrlVisit(urlid = fakeUrlId, searchid = fakeSearch.id, deleteMe=True, catchDict = fakeDict)
        clsObj.deleteFlaggedSearches()
        assert ret == RETURN_SUCCESS
    def installSomeFakeRecords(self):
        # Helper: insert one complete linked set of rows (search, criteria,
        # url, tags, html, content, content-search link) and return their ids.
        srch = Search(include = 'xx', exclude = 'yy, yy, yy', clearAll = False, clearNonconform = True, viewcriteriaid = 1, andOr = 'or', deleteMe = True)
        srchview = SearchViewcriteria(searchid = srch.id, include = 'xx', exclude = 'yy', andOr = 'and')
        url = Url(url = 'http://www.python.org/?'+str(random.random()), addDate = datetime.now(), urlOrder = 1, deleteMe = True, visitDate = datetime.now(), source = 'xxxx')
        urlsrch = UrlSearch(urlid = url.id, searchid = srch.id)
        urltags = UrlTags(urlid = url.id, name = 'xxx', value = 'yyy')
        urlhtml = UrlHtml(urlid = url.id, html = "test html")
        cont = Content(content = "mycontent"+str(random.random()), dateAcquired = datetime.now(), datePosted = datetime.now(), shortCont = "shortCont"+str(random.random()))
        contsrch = ContentSearch(searchid = srch.id, urlid = url.id, contentid = cont.id)
        return {'url': url.id, 'srch': srch.id, 'cont': cont.id}
    def testDatabaseReturnObjects(self):
        #this module returns a bunch of (sometimes nested) query results; we'll add some fake content then return it
        fakeDict = self.installSomeFakeRecords()
        url = fakeDict['url']
        srch = fakeDict['srch']
        cont = fakeDict['cont']
        #now return it
        methodCls = DatabaseMethods()
        logging.debug("results from testDatabaseReturnObjects")
        urls4srch =methodCls.getUrlsForSearch(srch)
        for u in urls4srch:
            logging.debug('urls %s' %str(u))
        urls4srch =methodCls.getUrlsForSearch(srch, limit = 1)
        for u in urls4srch:
            logging.debug('urls with limit %s' %str(u))
        cont4srch = methodCls.getContentForSearch(srch)
        for u in cont4srch:
            logging.debug('cont %s' %str(u))
        html4url = methodCls.getHtmlForUrl(url)
        for u in html4url:
            logging.debug('html for url %s' %str(u))
        html4srch = methodCls.getHtmlForSearch(srch)
        for u in html4srch:
            logging.debug('html for search %s'%str(u))
    def testReferentialIntegrity(self):
        #tests referential integrity of the database; first by adding a complete set of records then deleting the top level object
        # attempts to access dependent objects should fail if the cascading deletes work as expected
        clsObj = DatabaseMethods()
        # NOTE(review): the attribute-style access below (clsObj.Search,
        # dbRoutines.Url, ...) assumes DatabaseMethods / a dbRoutines module
        # expose the table classes directly -- confirm; the accessors
        # elsewhere in this class use getSearchObj() etc. instead.
        deleteMe = True; include = 'michigan, football'; exclude = 'ohio state'; urlname = 'urlname'
        search = clsObj.Search(name = 'test', include = include, exclude = exclude, deleteMe = True)
        criteria = clsObj.SearchViewcriteria(searchid = search.id, include = search.include, exclude = search.exclude, andOr = search.andOr)
        url1 = clsObj.Url(url = urlname + str(random.random()))
        urlsrch1 = clsObj.UrlSearch(urlid = url1.id, searchid = search.id)
        url2 = clsObj.Url(url = urlname + str(random.random()))
        urlsrch2 = clsObj.UrlSearch(urlid = url2.id, searchid = search.id)
        cont = clsObj.Content(content = 'cont' + str(random.random()), shortCont = 'short' + str(random.random()))
        contsrch1 = clsObj.ContentSearch(urlid = url1.id, contentid = cont.id, searchid = search.id)
        clsObj.Url.delete(url1.id)
        #These should all fail on the first command of each try block. If not, it should raise an error
        # NOTE(review): the bare ``except: pass`` blocks below also swallow
        # the WrapperError raised on the line after each lookup, so these
        # checks can never actually fail -- the excepts should be narrowed
        # to the expected not-found exception.
        try:
            dbRoutines.Url.get(url1.id)
            raise WrapperError("DB Integrity failed")
        except:
            pass
        try:
            dbRoutines.UrlSearch.get(urlsrch1.id)
            raise WrapperError("DB Integrity failed")
        except:
            pass
        try:
            dbRoutines.ContentSearch.get(contsrch1.id)
            raise WrapperError("DB Integrity failed")
        except:
            pass
        try:
            dbRoutines.Content.get(cont.id)
            raise WrapperError("DB Integrity failed")
        except:
            pass
        pass
# Manual test driver: the test class above is not unittest-discoverable, so
# individual tests are enabled by hand (currently all commented out inside
# the triple-quoted block).
if __name__=='__main__':
    #instantiate the class object
    test = testDbRoutines()
    #run the tests
    '''these all work
    test.testDatabaseReturnObjects()
    test.testAddUrlsFromList()
    test.testAddCatchFromUrlVisit()
    test.testDeleteUrls()
    '''
    a=1  # no-op placeholder so the block always ends in a statement
|
pbarton666/buzz_bot
|
djangoproj/djangoapp/b_dbRoutines.py
|
Python
|
mit
| 30,740
|
[
"VisIt"
] |
76fe5b07e5671681ffaa5e975b0f286009458bfa932efedc151603bc64077afb
|
r"""
Package handling SV solvers in yaplf.
Package yaplf.algorithms.svm.solvers contains all the classes handling solvers
in SV learning algorithms. A solver is specialized in finding the solution of
one of the peculiar constrained optimization problems rising when dealing with
SV algorithms.
TODO:
- SV regression solvers
- Accuracy SV regression solvers
- pep8 checked
- pylint score: 6.95
AUTHORS:
- Dario Malchiodi (2010-02-15): initial version.
- Dario Malchiodi (2010-04-06): added ``SVMClassificationSolver``,
``PyMLClassificationSolver``.
- Dario Malchiodi (2010-04-12): added ``CVXOPTVQClassificationSolver``,
- Dario Malchiodi (2014-01-20): added ``GurobiClassificationSolver``.
"""
#*****************************************************************************
# Copyright (C) 2010 Dario Malchiodi <malchiodi@dsi.unimi.it>
#
# This file is part of yaplf.
# yaplf is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
# yaplf is distributed in the hope that it will be useful, but without any
# warranty; without even the implied warranty of merchantability or fitness
# for a particular purpose. See the GNU Lesser General Public License for
# more details.
# You should have received a copy of the GNU Lesser General Public License
# along with yaplf; if not, see <http://www.gnu.org/licenses/>.
#
#*****************************************************************************
import xmlrpclib
from numpy import eye, array, transpose
from yaplf.models.kernel import LinearKernel
try:
from cvxopt import solvers
from cvxopt.base import matrix as cvxopt_matrix
except ImportError:
#print "Warning: no cvxopt package"
pass
try:
from PyML import VectorDataSet, SVM
except ImportError:
#print "Warning: no PyML package"
pass
try:
import gurobipy
except ImportError:
print 'Warning: no gurobipy package'
from yaplf.utility import chop, kronecker_delta
class SVMClassificationSolver(object):
    r"""Abstract base class for SVM classification solvers.

    Concrete subclasses implement ``solve``, which receives a list/tuple of
    ``LabeledExample`` instances (labels in {-1, 1}), a tradeoff constant
    ``c`` and a ``Kernel`` instance, builds the corresponding quadratic
    constrained optimization problem, solves it and returns the optimal
    multipliers as a list of floats.  See concrete subclasses such as
    ``CVXOPTClassificationSolver`` in this package for usage examples.

    AUTHORS:

    - Dario Malchiodi (2010-04-06)
    """

    def __init__(self):
        r"""Nothing to initialise in the abstract base class."""
        pass

    def solve(self, sample, c, kernel):
        r"""Solve the SVM classification problem for ``sample``.

        Subclass responsibility: this base implementation always raises
        ``NotImplementedError``.
        """
        raise NotImplementedError('solve not callable in base class')
class GurobiClassificationSolver(SVMClassificationSolver):
    r"""SVM classification solver backed by gurobi.

    Finds an approximate solution of the dual optimization problem of
    [Cortes and Vapnik, 1995], in its hard-margin (``c == float('inf')``)
    or soft-margin formulation.  Not meant to be used directly: it is
    driven by ``SVMClassificationAlgorithm``.

    INPUT:

    - ``verbose`` -- boolean (default: ``False``) flag triggering verbose
      mode.

    EXAMPLES:

    ::

        >>> from yaplf.data import LabeledExample
        >>> and_sample = [LabeledExample((1, 1), 1),
        ...     LabeledExample((0, 0), -1), LabeledExample((0, 1), -1),
        ...     LabeledExample((1, 0), -1)]
        >>> from yaplf.models.kernel import LinearKernel
        >>> s = GurobiClassificationSolver()
        >>> s.solve(and_sample, 2, LinearKernel())
        [2, 0, 0.999999999992222, 0.999999999992222]
        >>> s.solve(and_sample, float('inf'), LinearKernel())
        [4.00000000000204, 0, 1.999999999976717, 1.99999999997672]

    REFERENCES:

    [Cortes and Vapnik, 1995] Corinna Cortes and Vladimir Vapnik,
    Support-Vector Networks, Machine Learning 20 (1995), 273--297.

    AUTHORS:

    - Dario Malchiodi (2014-01-20)
    """

    def __init__(self, verbose=False):
        r"""See ``GurobiClassificationSolver`` for full documentation."""
        try:
            gurobipy.os  # cheap probe: NameError means the import failed
        except NameError:
            raise NotImplementedError("gurobipy package not available")
        SVMClassificationSolver.__init__(self)
        self.verbose = verbose

    def solve(self, sample, c=float('inf'), kernel=LinearKernel(),
              tolerance=1e-6):
        r"""Solve the SVM classification dual problem for ``sample``.

        INPUT:

        - ``sample`` -- list or tuple of ``LabeledExample`` instances whose
          labels are all set either to `1` or `-1`.
        - ``c`` -- float tradeoff constant; ``float('inf')`` (default)
          selects the hard-margin formulation.
        - ``kernel`` -- ``Kernel`` instance (default: ``LinearKernel()``).
        - ``tolerance`` -- tolerance used when clipping optimal values to
          the extremes of their interval.

        OUTPUT:

        list of float values -- optimal multipliers of the dual problem.
        """
        m = len(sample)
        patterns = [e.pattern for e in sample]
        labels = [e.label for e in sample]
        model = gurobipy.Model('classify')
        # One multiplier per example; bounded above by c only in the
        # soft-margin case.
        for i in range(m):
            if c == float('inf'):
                model.addVar(name='alpha_%d' % i, lb=0,
                             vtype=gurobipy.GRB.CONTINUOUS)
            else:
                model.addVar(name='alpha_%d' % i, lb=0, ub=c,
                             vtype=gurobipy.GRB.CONTINUOUS)
        model.update()
        alphas = model.getVars()
        # Dual objective:
        #   sum_i alpha_i - 1/2 sum_ij alpha_i alpha_j y_i y_j K(x_i, x_j).
        # Idiom fix: the original used map() with Python-2-only
        # tuple-unpacking lambdas purely for their side effects; plain
        # loops are clearer and forward-compatible, with identical behavior.
        obj = gurobipy.QuadExpr() + sum(alphas)
        for i in range(m):
            for j in range(m):
                obj.add(alphas[i] * alphas[j] * labels[i] * labels[j] *
                        kernel.compute(patterns[i], patterns[j]), -0.5)
        model.setObjective(obj, gurobipy.GRB.MAXIMIZE)
        # Equality constraint: sum_i alpha_i y_i == 0.
        constEqual = gurobipy.LinExpr()
        for alpha, label in zip(alphas, labels):
            constEqual.add(alpha * label, 1.0)
        model.addConstr(constEqual, gurobipy.GRB.EQUAL, 0)
        if not self.verbose:
            model.setParam('OutputFlag', False)
        model.optimize()
        # Clip values numerically close to the bounds exactly onto 0 or c.
        return [chop(a.x, right=c, tolerance=tolerance) for a in alphas]
class CVXOPTClassificationSolver(SVMClassificationSolver):
    r"""SVM classification solver backed by cvxopt.

    Finds an approximate solution of the dual optimization problem of
    [Cortes and Vapnik, 1995], in its hard-margin (``c == float('inf')``)
    or soft-margin formulation.  Not meant to be used directly: it is
    driven by ``SVMClassificationAlgorithm``.

    INPUT:

    - ``verbose`` -- boolean (default: ``False``) verbose mode flag.
    - ``max_iterations`` -- integer (default: `1000`) maximum number of
      solver iterations.
    - ``solver`` -- string (default: ``'mosek'``) cvxopt solver to be used.

    EXAMPLES:

    ::

        >>> from yaplf.data import LabeledExample
        >>> and_sample = [LabeledExample((1, 1), 1),
        ...     LabeledExample((0, 0), -1), LabeledExample((0, 1), -1),
        ...     LabeledExample((1, 0), -1)]
        >>> from yaplf.models.kernel import LinearKernel
        >>> s = CVXOPTClassificationSolver()
        >>> s.solve(and_sample, 2, LinearKernel())
        [2, 0, 0.9999998669645057, 0.9999998669645057]
        >>> s.solve(and_sample, float('inf'), LinearKernel())
        [4.000001003300218, 0, 2.000000364577095, 2.000000364577095]

    REFERENCES:

    [Cortes and Vapnik, 1995] Corinna Cortes and Vladimir Vapnik,
    Support-Vector Networks, Machine Learning 20 (1995), 273--297.

    AUTHORS:

    - Dario Malchiodi (2010-02-22)
    """

    def __init__(self, **kwargs):
        r"""See ``CVXOPTClassificationSolver`` for full documentation."""
        try:
            solvers.options  # NameError here means cvxopt failed to import
        except NameError:
            raise NotImplementedError("cvxopt package not available")
        self.verbose = kwargs.get('verbose', False)
        self.max_iterations = kwargs.get('max_iterations', 1000)
        self.solver = kwargs.get('solver', 'mosek')
        SVMClassificationSolver.__init__(self)

    def solve(self, sample, c=float('inf'), kernel=LinearKernel()):
        r"""Solve the SVM classification dual problem for ``sample``.

        INPUT:

        - ``sample`` -- list or tuple of ``LabeledExample`` instances whose
          labels are all set either to `1` or `-1`.
        - ``c`` -- float tradeoff constant; ``float('inf')`` selects the
          hard-margin formulation.
        - ``kernel`` -- ``Kernel`` instance (default: ``LinearKernel()``).

        OUTPUT:

        list of float values -- optimal multipliers of the dual problem.

        Raises ``ValueError`` when cvxopt does not reach an optimal status.
        """
        solvers.options['show_progress'] = self.verbose
        solvers.options['maxiters'] = self.max_iterations
        solvers.options['solver'] = self.solver
        # cvxopt's qp solves  min 1/2 x' Q x + p' x  s.t.  G x <= h, A x = b;
        # the local names below follow that convention.
        num_examples = len(sample)
        quad = cvxopt_matrix(
            [[elem_i.label * elem_j.label *
              kernel.compute(elem_i.pattern, elem_j.pattern)
              for elem_i in sample] for elem_j in sample])
        lin = cvxopt_matrix([-1.0] * num_examples)
        if c == float('inf'):
            # Hard margin: only the constraints alpha_i >= 0.
            ineq_coeff = cvxopt_matrix(-1.0 * eye(num_examples))
            ineq_const = cvxopt_matrix([0.0] * num_examples)
        else:
            # Soft margin: box constraints 0 <= alpha_i <= c.
            ineq_coeff = cvxopt_matrix([
                [-1.0 * kronecker_delta(i, j) for i in range(num_examples)]
                + [kronecker_delta(i, j) for i in range(num_examples)]
                for j in range(num_examples)])
            ineq_const = cvxopt_matrix(
                [float(0.0)] * num_examples + [float(c)] * num_examples)
        # Coercion to float below is required to work with sage notebooks.
        eq_coeff = cvxopt_matrix(
            [float(elem.label) for elem in sample], (1, num_examples))
        eq_const = cvxopt_matrix(0.0)
        sol = solvers.qp(quad, lin, ineq_coeff, ineq_const,
                         eq_coeff, eq_const)
        if sol["status"] != 'optimal':
            raise ValueError('cvxopt returned status ' + sol["status"])
        # Clip values numerically close to the bounds exactly onto 0 or c.
        return [chop(x, right=c) for x in list(sol['x'])]
class PyMLClassificationSolver(SVMClassificationSolver):
r"""
SVM Classification solver based on PyML. This solver is specialized in
finding the approximate solution of the optimization problem described in
[Cortes and Vapnik, 1995], both in its original and soft-margin
formulation.
INPUT:
- ``self`` -- object on which the function is invoked.
- ``verbose`` -- boolean (default: ``False``) flag triggering verbose mode.
OUTPUT:
``SVMClassificationSolver`` object.
EXAMPLES:
Consider the following representation of the AND binary function, and a
default instantiation for ``PyMLClassificationSolver``:
::
>>> from yaplf.data import LabeledExample
>>> and_sample = [LabeledExample((1, 1), 1),
... LabeledExample((0, 0), -1), LabeledExample((0, 1), -1),
... LabeledExample((1, 0), -1)]
>>> from yaplf.algorithms.svm.classification.solvers \
... import PyMLClassificationSolver
>>> s = PyMLClassificationSolver()
Once the solver instance is available, it is possible to invoke its
``solve`` function, specifying a labeled sample such as ``and_sample``, a
positive value for the constant `C` and a kernel instance in order to get
the solution of the corresponding SV classification optimization problem:
::
>>> from yaplf.models.kernel import LinearKernel
>>> alphas = s.solve(and_sample, 2, LinearKernel()) # doctest:+ELLIPSIS
Cpos, Cneg...
>>> print alphas
[2.0, 0.0, 1.0, 1.0]
The value for `C` can be set to ``None``, in order to build and solve the
original optimization problem rather than the soft-margin formulation:
::
>>> alphas = s.solve(and_sample, None, LinearKernel()) # doctest:+ELLIPSIS
Cpos, Cneg...
>>> print alphas
[3.984375, 0.0, 1.9921875, 1.9921875]
Note however that this class should never be used directly. It is
automatically used by ``SVMClassificationAlgorithm``.
REFERENCES:
[Cortes and Vapnik, 1995] Corinna Cortes and Vladimir Vapnik,
Support-Vector Networks, Machine Learning 20 (1995), 273--297.
AUTHORS:
- Dario Malchiodi (2010-04-06)
"""
def __init__(self):
    r"""Raise ``NotImplementedError`` unless the PyML package loaded.

    See ``PyMLClassificationSolver`` for full documentation.
    """
    try:
        SVM()  # probe: NameError here means the optional import failed
    except NameError:
        raise NotImplementedError("PyML package not available")
    SVMClassificationSolver.__init__(self)
def solve(self, sample, c, kernel):
r"""
Solve the SVM classification optimization problem corresponding
to the supplied sample, according to specified value for the tradeoff
constant `C`.
INPUT:
- ``sample`` -- list or tuple of ``LabeledExample`` instances whose
labels are all set either to `1` or `-1`.
- ``c`` -- float or None (the former choice selects the
soft-margin version of the algorithm) value for the tradeoff constant
`C`.
- ``kernel`` -- ``Kernel`` instance defining the kernel to be used.
OUTPUT:
list of float values -- optimal values for the optimization problem.
EXAMPLES:
Consider the following representation of the AND binary function, and a
default instantiation for ``PyMLClassificationSolver``:
::
>>> from yaplf.data import LabeledExample
>>> and_sample = [LabeledExample((1, 1), 1),
... LabeledExample((0, 0), -1), LabeledExample((0, 1), -1),
... LabeledExample((1, 0), -1)]
>>> from yaplf.algorithms.svm.classification.solvers \
... import PyMLClassificationSolver
>>> s = PyMLClassificationSolver()
Once the solver instance is available, it is possible to invoke its
``solve``function, specifying a labeled sample such as ``and_sample``,
a positive value for the constant `C` and a kernel instance in order to
get the solution of the corresponding SV classification optimization
problem:
::
>>> from yaplf.models.kernel import LinearKernel
>>> alphas = s.solve(and_sample, 2, LinearKernel()) # doctest:+ELLIPSIS
Cpos, Cneg...
>>> print alphas
[2.0, 0.0, 1.0, 1.0]
The value for `C` can be set to ``None``, in order to build and solve
the original optimization problem rather than the soft-margin
formulation:
::
>>> alphas = s.solve(and_sample, None, LinearKernel()) # doctest:+ELLIPSIS
Cpos, Cneg...
>>> print alphas
[3.984375, 0.0, 1.9921875, 1.9921875]
Note however that this class should never be used directly. It is
automatically used by ``SVMClassificationAlgorithm``.
AUTHORS:
- Dario Malchiodi (2010-04-06)
"""
patterns = array([[float(p) for p in e.pattern] for e in sample])
# was
# patterns = array([map(float, e.pattern) for e in sample])
labels = array([float(e.label) for e in sample])
data = VectorDataSet(patterns, L=labels)
if kernel.__class__.__name__ == 'LinearKernel':
pass
elif kernel.__class__.__name__ == 'GaussianKernel':
data.attachKernel('gaussian',
gamma=float(1.0 / (kernel.sigma ** 2)))
elif kernel.__class__.__name__ == 'PolynomialKernel':
data.attachKernel('poly', degree=int(kernel.degree),
additiveConst=float(1))
elif kernel.__class__.__name__ == 'HomogeneousPolynomialKernel':
data.attachKernel('poly', degree=int(kernel.degree),
additiveConst=float(0))
else:
raise NotImplementedError(str(kernel) + 'not implemented in PyML')
solver = SVM(Cmode='equal')
solver.C = (float(c) if c is not None else 100000000.)
solver.train(data, saveSpace=False)
alphas = [0.0] * len(sample)
for index, value in transpose([solver.model.svID, solver.model.alpha]):
alphas[int(index)] = abs(value)
return alphas
class NEOSClassificationSolver(SVMClassificationSolver):
    r"""
    SVM Classification solver based on cvxopt. This solver is specialized in
    finding the approximate solution of the optimization problem described in
    [Cortes and Vapnik, 1995], both in its original and soft-margin
    formulation.

    INPUT:

    - ``self`` -- object on which the function is invoked.

    - ``verbose`` -- boolean (default: ``False``) flag triggering verbose mode.

    OUTPUT:

    ``NEOSClassificationSolver`` object.

    EXAMPLES:

    Consider the following representation of the AND binary function, and a
    default instantiation for ``NEOSClassificationSolver``:

    ::

        >>> from yaplf.data import LabeledExample
        >>> and_sample = [LabeledExample((1, 1), 1),
        ... LabeledExample((0, 0), -1), LabeledExample((0, 1), -1),
        ... LabeledExample((1, 0), -1)]
        >>> from yaplf.algorithms.svm.classification.solvers import \
        ... NEOSClassificationSolver
        >>> s = NEOSClassificationSolver()

    Once the solver instance is available, it is possible to invoke its
    ``solve`` function, specifying a labeled sample such as ``and_sample``, a
    positive value for the constant `c` and a kernel instance in order
    to get the solution of the corresponding SV classification optimization
    problem:

    ::

        >>> from yaplf.models.kernel import LinearKernel
        >>> s.solve(and_sample, 2, LinearKernel())
        [2, 0, 1.0, 1.0]

    The value for `c` can be set to ``float('inf')``, in order to build and
    solve the original optimization problem rather than the soft-margin
    formulation:

    ::

        >>> alphas = s.solve(and_sample, float('inf'), LinearKernel()) # doctest:+ELLIPSIS
        ...
        >>> print alphas
        [4.0, 0, 2.0, 2.0]

    Note however that this class should never be used directly. It is
    automatically used by ``SVMClassificationAlgorithm``.

    REFERENCES:

    [Cortes and Vapnik, 1995] Corinna Cortes and Vladimir Vapnik,
    Support-Vector Networks, Machine Learning 20 (1995), 273--297.

    AUTHORS:

    - Dario Malchiodi (2011-02-05)

    """

    def __init__(self, **kwargs):
        r"""
        See ``NEOSClassificationSolver`` for full documentation.
        """
        # ``verbose`` controls whether the generated AMPL model and the
        # NEOS job progress are echoed to standard output.
        try:
            self.verbose = kwargs['verbose']
        except KeyError:
            self.verbose = False
        SVMClassificationSolver.__init__(self)

    def solve(self, sample, c=float('inf'), kernel=LinearKernel()):
        r"""
        Solve the SVM classification optimization problem corresponding
        to the supplied sample, according to specified value for the tradeoff
        constant `C`.

        INPUT:

        - ``sample`` -- list or tuple of ``LabeledExample`` instances whose
          labels are all set either to `1` or `-1`.

        - ``c`` -- float or ``float('inf')`` (the former choice selects the
          soft-margin version of the algorithm) value for the tradeoff constant
          `C`.

        - ``kernel`` -- ``Kernel`` instance defining the kernel to be used
          (default: ``LinearKernel()``, accounting for a linear kernel).

        OUTPUT:

        list of float values -- optimal values for the optimization problem.

        EXAMPLES:

        Consider the following representation of the AND binary function, and a
        default instantiation for ``NEOSClassificationSolver``:

        ::

            >>> from yaplf.data import LabeledExample
            >>> and_sample = [LabeledExample((1, 1), 1),
            ... LabeledExample((0, 0), -1), LabeledExample((0, 1), -1),
            ... LabeledExample((1, 0), -1)]
            >>> from yaplf.algorithms.svm.classification.solvers \
            ... import NEOSClassificationSolver
            >>> s = NEOSClassificationSolver()

        Once the solver instance is available, it is possible to invoke its
        ``solve`` function, specifying a labeled sample such as ``and_sample``,
        a positive value for the constant `C` and a kernel instance in order to
        get the solution of the corresponding SV classification optimization
        problem:

        ::

            >>> from yaplf.models.kernel import LinearKernel
            >>> s.solve(and_sample, 2, LinearKernel())
            [2, 0, 1.0, 1.0]

        The value for `C` can be set to ``float('inf')``, in order to build
        and solve the original optimization problem rather than the
        soft-margin formulation:

        ::

            >>> s.solve(and_sample, float('inf'), LinearKernel())
            [4.0, 0, 2.0, 2.0]

        Note however that this class should never be used directly. It is
        automatically used by ``SVMClassificationAlgorithm``.

        AUTHORS:

        - Dario Malchiodi (2011-02-05)

        """
        # Connect to the NEOS optimization server through XML-RPC.
        neos = xmlrpclib.Server("http://%s:%d" % ("www.neos-server.org", 3332))
        num_examples = len(sample)
        input_dimension = len(sample[0].pattern)
        # ``float('inf')`` selects the hard-margin problem: no upper
        # bound is emitted for the alpha variables in the AMPL model.
        constraint = " <= " + str(c) if c != float('inf') else ""
        kernel_description = AMPLKernelFactory(kernel).get_kernel_description()
        # that is, something like sum{k in 1..n}(x[i,k]*x[j,k])
        # Serialize patterns and labels as AMPL ``param`` tables.
        pattern_description = ["param x:\t"]
        label_description = ["param y:=\n"]
        for component_index in range(input_dimension):
            pattern_description.append(str(component_index+1))
            pattern_description.append("\t")
        pattern_description.append(":=\n")
        example_number = 1
        for example in sample:
            pattern_description.append(str(example_number))
            for component in example.pattern:
                pattern_description.append("\t")
                pattern_description.append(str(component))
            label_description.append(str(example_number))
            label_description.append("\t")
            label_description.append(str(sample[example_number-1].label))
            example_number = example_number + 1
            # Close both tables with ';' after the last example.
            if example_number > len(sample):
                pattern_description.append(";")
                label_description.append(";")
            pattern_description.append("\n")
            label_description.append("\n")
        xml = """
<document>
<category>nco</category>
<solver>SNOPT</solver>
<inputMethod>AMPL</inputMethod>
<model><![CDATA[
param m integer > 0 default %d; # number of sample points
param n integer > 0 default %d; # sample space dimension
param x {1..m,1..n}; # sample points
param y {1..m}; # sample labels
param dot{i in 1..m,j in 1..m}:=%s;
var alpha{1..m} >=0%s;
maximize quadratic_form:
sum{i in 1..m} alpha[i]
-1/2*sum{i in 1..m,j in 1..m}alpha[i]*alpha[j]*y[i]*y[j]*dot[i,j];
subject to linear_constraint:
sum{i in 1..m} alpha[i]*y[i]=0;
]]></model>
<data><![CDATA[
data;
%s
%s
]]></data>
<commands><![CDATA[
option solver snopt;
solve;
printf: "(";
printf {i in 1..m-1}:"%%f,",alpha[i];
printf: "%%f)",alpha[m];
]]></commands>
</document>
""" % (num_examples, input_dimension, kernel_description, constraint,
       "".join(pattern_description), "".join(label_description))
        (job_number, password) = neos.submitJob(xml)
        if self.verbose:
            print xml
            print "job number: %s" % job_number
        # Poll NEOS until the job completes, streaming intermediate
        # output when in verbose mode.
        offset = 0
        status = ""
        while status != "Done":
            (msg, offset) = neos.getIntermediateResults(job_number, password,
                                                        offset)
            if self.verbose:
                print msg.data
            status = neos.getJobStatus(job_number, password)
        msg = neos.getFinalResults(job_number, password).data
        if self.verbose:
            print msg
        # The AMPL commands above print the solution as a parenthesized,
        # comma-separated tuple: locate and parse it.
        # NOTE(review): ``eval`` on text returned by the remote NEOS
        # service trusts that service completely.
        begin = 0
        while msg[begin] != '(':
            begin = begin + 1
        end = len(msg) - 1
        while msg[end] != ')':
            end = end - 1
        return [chop(alpha, right=c) for alpha in eval(msg[begin:end+1])]
class AMPLKernelFactory(object):
    r"""
    Factory class used in order to get a string containing the AMPL
    source code description for a given kernel.

    INPUT:

    - ``kernel`` -- ``Kernel`` instance to be rendered as AMPL code.
    """

    def __init__(self, kernel):
        self.kernel = kernel

    def get_kernel_description(self):
        r"""
        Return the AMPL expression computing ``dot[i,j]`` for the
        wrapped kernel.

        OUTPUT:

        string -- AMPL source fragment; raises ``ValueError`` when the
        kernel has no known AMPL counterpart.
        """
        # Dispatch on the class name so no import of the concrete
        # kernel classes is needed here.
        name = self.kernel.__class__.__name__
        if name == "LinearKernel":
            return "sum{k in 1..n}(x[i,k]*x[j,k])"
        elif name == "PolynomialKernel":
            return "(sum{k in 1..n}x[i,k]*x[j,k]+1)^" + str(self.kernel.degree)
        elif name == "HomogeneousPolynomialKernel":
            return "(sum{k in 1..n}x[i,k]*x[j,k])^" + str(self.kernel.degree)
        elif name == "GaussianKernel":
            return "exp(-1*(sum{k in 1..n}(x[i,k]-x[j,k])^2)/(2*" + \
                str(self.kernel.sigma ** 2) + "))"
        elif name == "HyperbolicKernel":
            return "tanh(" + str(self.kernel.scale) + \
                " * (sum{k in 1..n}x[i,k]*x[j,k]) + " + \
                str(self.kernel.offset) + ")"
        else:
            # Fixed: a space was missing between the kernel repr and
            # the message text.
            raise ValueError(str(self.kernel)
                             + ' not analytically representable')
# Needed in order to use cvxopt within sage
# (the Sage preparser rewrites numeric literals into ``Integer``/
# ``RealNumber`` calls, so plain-Python aliases must exist when this
# module is run outside Sage).
Integer = int
RealNumber = float
|
dariomalchiodi/yaplf
|
yaplf/algorithms/svm/classification/solvers.py
|
Python
|
lgpl-3.0
| 33,875
|
[
"Gaussian"
] |
bf555ea1ad6b33a2b37a39cd3130adb49963eee54d9e5c8bcf30ffb4651f904e
|
from ase import *
from gpaw import *
from ase.transport.calculators import TransportCalculator as TC
from gpaw.lcao.tools import get_lead_lcao_hamiltonian, remove_pbc
"""
1. calculate the transmission function of Sodium bulk with one atom in the
unit cell and sampling over (n, m) k-points in the transverse ibz.
2. calculate the transmission function of that same system repeated n, m
times in the transverse directions and where the transverse bz is sampled
by the gamma point only.
Ideally the two transmission functions should be the same.
"""
L = 3.0  # Na binding length
direction = 'x'
# Index of the transport direction within 'xyz' (0 for 'x').
dir = 'xyz'.index(direction)
# Energy grid (relative to the Fermi level) for the transmission.
# NOTE(review): ``np`` is assumed to be provided by the star imports
# above -- confirm it is re-exported by ase/gpaw.
energies = np.arange(-5, 10, 0.2)

def get_trans(h, s):
    # Identical principal layers are used for the scattering region and
    # both leads, so this is the transmission of the perfect wire.
    return TC(energies=energies, h=h, s=s, h1=h, s1=s, h2=h, s2=s,
              align_bf=0).get_transmission()

def get_hs(natoms, nkpts):
    # LCAO calculation for the repeated Na cell, sampled over the given
    # k-point grid; returns per-k Hamiltonian/overlap matrices.
    calc = GPAW(h=0.25, mode='lcao', basis='sz', width=0.2, kpts=nkpts,
                mixer=Mixer(0.1, 5, weight=80.0), usesymm=False)
    atoms = Atoms('Na', pbc=True, cell=(L, L, L)).repeat(natoms)
    atoms.set_calculator(calc)
    atoms.get_potential_energy()
    fermi = calc.get_fermi_level()
    ibz, w, h, s = get_lead_lcao_hamiltonian(calc, direction=direction)
    # Shift so the Fermi level sits at zero energy.
    h = h[0] - fermi * s
    # Strip couplings across the periodic boundary in the transport
    # direction for every transverse k-point.
    for h1, s1 in zip(h, s):
        remove_pbc(atoms, h1, s1, d=dir)
    return ibz, w, h, s

# First with transverse kpts
ibz1_kc, w1_k, h1_kmm, s1_kmm = get_hs(natoms=(3, 1, 1), nkpts=(4, 3, 3))
T1_k = [get_trans(h, s) for h, s in zip(h1_kmm, s1_kmm)]
# k-point-weighted average transmission.
T1 = np.dot(w1_k, T1_k)

# Second without transverse kpts
ibz2_kc, w2_k, h2_kmm, s2_kmm = get_hs(natoms=(3, 3, 3), nkpts=(4, 1, 1))
T2_k = [get_trans(h, s) for h, s in zip(h2_kmm, s2_kmm)]
T2 = np.dot(w2_k, T2_k)

if 1:
    import pylab as pl
    pl.plot(energies, T1, 'r--', label='With trans kpts')
    # Normalize by the 3x3 transverse repetition for comparison.
    pl.plot(energies, T2 / (3 * 3), 'b:', label='Without trans kpts')
    pl.axis('tight')
    pl.legend()
    pl.show()
|
qsnake/gpaw
|
oldtest/transport_transverse_k2.py
|
Python
|
gpl-3.0
| 1,907
|
[
"ASE",
"GPAW"
] |
d25a32068ae8a3f2957dcf56d52f8d7d7bb6bee94992987effe189f3fed659e2
|
""" Collection of DIRAC useful adler32 related tools.
By default on Error they return None.
.. warning::
On error False is returned.
.. warning::
All exceptions report to the stdout.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
__RCSID__ = "$Id$"
import six
from zlib import adler32
def intAdlerToHex(intAdler):
    """Change adler32 checksum base from decimal to hex.

    :param integer intAdler: adler32 checksum
    :return: 8 digit hex string (False on error)
    """
    try:
        # Normalise to an unsigned 32-bit value before formatting.
        digits = hex(intAdler & 0xffffffff).lower()
        # Drop a legacy python2 'L' suffix and zero-pad through the
        # '0x' marker, then keep the trailing 8 characters.
        digits = digits.replace('l', '').replace('x', '0000')
        return digits[-8:]
    except Exception as error:
        print(repr(error).replace(',)', ')'))
        return False
def hexAdlerToInt(hexAdler, pos=True):
    """Change hex base to decimal for adler32 checksum.

    :param mixed hexAdler: hex based adler32 checksum integer or a string
    :param boolean pos: flag to determine sign (default True = positive)
    :return: unsigned 32-bit integer checksum, or False when the input
             cannot be parsed
    """
    # Integers are already checksums: just clamp to unsigned 32 bits.
    if isinstance(hexAdler, six.integer_types):
        return hexAdler & 0xffffffff
    # First make sure we can parse the hex properly
    # 'False'/'-False' are stringified failure markers produced by the
    # other helpers in this module.
    if hexAdler == 'False' or hexAdler == '-False':
        return False
    # Strip a legacy python2 'L' suffix and keep the last 8 digits.
    hexAdler = hexAdler.lower().replace('l', '')
    hexAdler = hexAdler[-8:]
    # An '0x' marker may survive within the last 8 characters; map the
    # 'x' to '0' so ``int(..., 16)`` accepts the string.
    hexAdler = hexAdler.replace('x', '0')
    if not pos:
        hexAdler = "-%s" % hexAdler
    try:
        # Will always try to return the positive integer value of the provided hex
        return int(hexAdler, 16) & 0xffffffff
    except Exception as error:
        print(repr(error).replace(',)', ')'))
        return False
def compareAdler(adler1, adler2):
    """Check equality between two adler32 checksums.

    Each checksum is interpreted both as a positive and as a negative
    hex value; the checksums match when any interpretation coincides.

    :param adler1: 1st checksum
    :param adler2: 2nd checksum
    :return: True (False) if cheksums are (not) equal
    """
    candidates1 = [hexAdlerToInt(adler1), hexAdlerToInt(adler1, False)]
    candidates2 = [hexAdlerToInt(adler2), hexAdlerToInt(adler2, False)]
    # Drop one failed (False) interpretation per side, mirroring the
    # historical behaviour of list.remove().
    for candidates in (candidates1, candidates2):
        if False in candidates:
            candidates.remove(False)
    return any(value in candidates2 for value in candidates1)
def fileAdler(fileName):
    """Calculate alder32 checksum of the supplied file.

    :param str fileName: path to file
    :return: hex checksum string, or False on any error
    """
    chunkSize = 1048576  # read 1 MiB at a time to bound memory usage
    try:
        with open(fileName, "rb") as inputFile:
            checksum = 1  # adler32 seed value
            # Sentinel-iter yields fixed-size chunks until EOF (b'').
            for data in iter(lambda: inputFile.read(chunkSize), b''):
                checksum = adler32(data, checksum)
        return intAdlerToHex(checksum)
    except Exception as error:
        print(repr(error).replace(',)', ')'))
        return False
def stringAdler(string):
    """Calculate adler32 of the supplied string.

    :param str string: data
    :return: hex checksum string, or False on any error
    """
    try:
        # Encode to bytes (adler32 needs a buffer), then format as hex.
        return intAdlerToHex(adler32(string.encode()))
    except Exception as error:
        print(repr(error).replace(',)', ')'))
        return False
if __name__ == "__main__":
    # CLI usage: print "<path> : <adler32hex>" for each file argument.
    import sys
    for p in sys.argv[1:]:
        print("%s : %s" % (p, fileAdler(p)))
|
yujikato/DIRAC
|
src/DIRAC/Core/Utilities/Adler.py
|
Python
|
gpl-3.0
| 3,230
|
[
"DIRAC"
] |
132cc15e8abe9529a0e362777f768357d65f93f931d66518c7f14e891294cf78
|
""" Code that calculates clutter by using running stats. """
from copy import deepcopy
from distributed import Client, LocalCluster
from .config import get_field_names
import warnings
import numpy as np
import pyart
try:
from dask import delayed
import dask.array as da
except ImportError:
warnings.warn('Dask is not installed. Radar clutter module'
+ ' needs Dask to be able to run.')
pass
def tall_clutter(files, config,
                 clutter_thresh_min=0.0002,
                 clutter_thresh_max=0.25, radius=1,
                 write_radar=True, out_file=None,
                 use_dask=False):
    """
    Wind Farm Clutter Calculation

    Parameters
    ----------
    files : list
        List of radar files used for the clutter calculation.
    config : str
        String representing the configuration for the radar.
        Such possible configurations are listed in default_config.py

    Other Parameters
    ----------------
    clutter_thresh_min : float
        Threshold value for which, any clutter values above the
        clutter_thres_min will be considered clutter, as long as they
        are also below the clutter_thres_max.
    clutter_thresh_max : float
        Threshold value for which, any clutter values below the
        clutter_thres_max will be considered clutter, as long as they
        are also above the clutter_thres_min.
    radius : int
        Radius of the area surrounding the clutter gate that will
        be also flagged as clutter.
    write_radar : bool
        Whether to or not, to write the clutter radar as a netCDF file.
        Default is True.
    out_file : string
        String of location and filename to write the radar object too,
        if write_radar is True.
    use_dask : bool
        Use dask instead of running stats for calculation. The will reduce
        run time.

    Returns
    -------
    clutter_radar : Radar
        Radar object with the clutter field that was calculated.
        This radar only has the clutter field, but maintains all
        other radar specifications.
    """
    field_names = get_field_names(config)
    refl_field = field_names["reflectivity"]
    vel_field = field_names["velocity"]
    ncp_field = field_names["normalized_coherent_power"]

    def get_reflect_array(file, first_shape):
        """ Retrieves a reflectivity array for a radar volume,
        masking low-NCP gates and gates above 2 km; volumes whose
        shape differs from the reference are returned as all-NaN. """
        try:
            radar = pyart.io.read(file, include_fields=[refl_field,
                                                        ncp_field, vel_field])
            reflect_array = deepcopy(radar.fields[refl_field]['data'])
            ncp = radar.fields[ncp_field]['data']
            height = radar.gate_z["data"]
            up_in_the_air = height > 2000.0
            the_mask = np.logical_or.reduce(
                (ncp < 0.8, reflect_array.mask, up_in_the_air))
            reflect_array = np.ma.masked_where(the_mask, reflect_array)
            del radar
            if reflect_array.shape == first_shape:
                return reflect_array.filled(fill_value=np.nan)
        except (TypeError, OSError):
            print(file + ' is corrupt...skipping!')
        return np.nan * np.zeros(first_shape)

    if use_dask is False:
        run_stats = _RunningStats()
        first_shape = 0
        for file in files:
            try:
                radar = pyart.io.read(file)
                reflect_array = radar.fields[refl_field]['data']
                ncp = deepcopy(radar.fields[ncp_field]['data'])
                # reflect_array = np.ma.masked_where(ncp < 0.7, reflect_array)
                if first_shape == 0:
                    first_shape = reflect_array.shape
                    clutter_radar = radar
                # Fixed: the array was previously pushed twice (once
                # unconditionally and once inside the shape check),
                # double-counting volumes; push exactly once, and only
                # when the volume matches the reference shape.
                if reflect_array.shape == first_shape:
                    run_stats.push(reflect_array)
                del radar
            except (TypeError, OSError):
                print(file + ' is corrupt...skipping!')
                continue
        mean = run_stats.mean()
        stdev = run_stats.standard_deviation()
        clutter_values = stdev / mean
        clutter_values = np.ma.masked_invalid(clutter_values)
        # Fixed: ``clutter_values_max`` was an undefined name (NameError
        # at runtime); the fill value is one past the upper threshold so
        # masked cells can never be flagged as clutter.
        clutter_values_no_mask = clutter_values.filled(
            clutter_thresh_max + 1)
    else:
        cluster = LocalCluster(n_workers=20, processes=True)
        client = Client(cluster)
        # Scan ahead for the first readable volume: it fixes the
        # reference shape and serves as the output radar object.
        first_shape = 0
        i = 0
        while first_shape == 0:
            try:
                radar = pyart.io.read(files[i])
                reflect_array = radar.fields[refl_field]['data']
                first_shape = reflect_array.shape
                clutter_radar = radar
            except (TypeError, OSError):
                i = i + 1
                # Fixed: ``file`` was referenced here but is not defined
                # in this scope; report the file that actually failed.
                print(files[i - 1] + ' is corrupt...skipping!')
                continue
        arrays = [delayed(get_reflect_array)(file, first_shape)
                  for file in files]
        array = [da.from_delayed(a, shape=first_shape, dtype=float)
                 for a in arrays]
        array = da.stack(array, axis=0)
        print('## Calculating mean in parallel...')
        mean = np.array(da.nanmean(array, axis=0))
        print('## Calculating standard deviation...')
        count = np.array(da.sum(da.isfinite(array), axis=0))
        stdev = np.array(da.nanstd(array, axis=0))
        clutter_values = stdev / mean
        clutter_values = np.ma.masked_invalid(clutter_values)
        # Also mask gates observed in fewer than 20 volumes.
        clutter_values = np.ma.masked_where(np.logical_or(
            clutter_values.mask, count < 20), clutter_values)
        # Masked arrays can suck
        clutter_values_no_mask = clutter_values.filled(
            (clutter_thresh_max + 1))

    shape = clutter_values.shape
    mask = np.ma.getmask(clutter_values)
    # Gates whose std/mean ratio falls inside the threshold window are
    # flagged as clutter.
    is_clutters = np.argwhere(
        np.logical_and.reduce((clutter_values_no_mask > clutter_thresh_min,
                               clutter_values_no_mask < clutter_thresh_max,
                               )))
    clutter_array = _clutter_marker(is_clutters, shape, mask, radius)
    clutter_radar.fields.clear()
    clutter_array = clutter_array.filled(0)
    clutter_dict = _clutter_to_dict(clutter_array)
    clutter_value_dict = _clutter_to_dict(clutter_values)
    clutter_value_dict["long_name"] = "Clutter value (std. dev/mean Z)"
    clutter_value_dict["standard_name"] = "clutter_value"
    clutter_radar.add_field('ground_clutter', clutter_dict,
                            replace_existing=True)
    clutter_radar.add_field('clutter_value', clutter_value_dict,
                            replace_existing=True)
    if write_radar is True:
        pyart.io.write_cfradial(out_file, clutter_radar)
    del clutter_radar
    return
# Adapted from http://stackoverflow.com/a/17637351/6392167
class _RunningStats():
    """ Welford-style running mean/variance/standard deviation that
    consumes one (possibly masked) array at a time, so the full stack
    of arrays never has to be held in memory. """

    def __init__(self):
        self.n = 0
        self.old_m = 0
        self.new_m = 0
        self.old_s = 0
        self.new_s = 0

    def clear(self):
        """ Clears n variable in stat calculation. """
        self.n = 0

    def push(self, x):
        """ Folds one array into the running statistics; masked cells
        contribute nothing and do not advance their sample count. """
        shape = x.shape
        mask = np.ma.getmask(x)
        # Per-cell increment: 1 where the value is valid, 0 where masked.
        increment = np.ma.filled(
            np.ma.array(np.ones(shape), mask=mask), fill_value=0.0)
        self.n += increment
        # Per-cell divisor; masked cells get 1.0 to keep division safe.
        divisor = np.ma.filled(
            np.ma.array(self.n, mask=mask), fill_value=1.0)
        if self.n.max() == 1.0:
            # First array simply seeds the mean with zero variance.
            self.old_m = self.new_m = np.ma.filled(x, 0.0)
            self.old_s = np.zeros(shape)
        else:
            # Welford update, done with nansum so NaNs drop out.
            self.new_m = np.nansum(
                np.dstack((self.old_m, (x - self.old_m) / divisor)), 2)
            self.new_s = np.nansum(
                np.dstack((self.old_s,
                           (x - self.old_m) * (x - self.new_m))), 2)
            self.old_m = self.new_m
            self.old_s = self.new_s

    def mean(self):
        """ Returns mean once all arrays are inputed. """
        return self.new_m if np.any(self.n) else 0.0

    def variance(self):
        """ Returns variance once all arrays are inputed. """
        return self.new_s / (self.n - 1) if (self.n.max() > 1.0) else 0.0

    def standard_deviation(self):
        """ Returns standard deviation once all arrays are inputed. """
        return np.ma.sqrt(self.variance())
def _clutter_marker(is_clutters, shape, mask, radius):
    """ Expand each flagged clutter gate into a circular footprint of
    the given radius and return the resulting 0/1 clutter field as a
    masked array carrying the supplied mask. """
    flagged = np.zeros(shape)
    # Pad with a sentinel so footprints near the edges never wrap.
    flagged = np.pad(flagged, radius,
                     mode='constant', constant_values=-999)
    # Shift indices into the padded coordinate frame.
    shifted = is_clutters + radius
    y_off, x_off = np.ogrid[-radius:(radius + 1),
                            -radius:(radius + 1)]
    footprint = (y_off * y_off) + (x_off * x_off) <= (radius * radius)
    for ray, gate in shifted:
        window = flagged[ray - radius:ray + radius + 1,
                         gate - radius:gate + radius + 1]
        flagged[ray - radius:ray + radius + 1,
                gate - radius:gate + radius + 1] = np.logical_or(window,
                                                                 footprint)
    # Trim the padding back off before attaching the mask.
    trimmed = flagged[radius:shape[0] + radius,
                      radius:shape[1] + radius]
    return np.ma.array(trimmed, mask=mask)
def _clutter_to_dict(clutter_array):
    """ Wrap a clutter array in a pyart-style field dictionary so it
    can be attached to a radar object. """
    return {
        'units': '1',
        'data': clutter_array,
        'standard_name': 'ground_clutter',
        'long_name': 'Ground Clutter',
        'notes': '0: No Clutter, 1: Clutter',
    }
|
zssherman/cmac2.0
|
cmac/radar_clutter.py
|
Python
|
bsd-3-clause
| 10,230
|
[
"NetCDF"
] |
d3e5d4efb372adf7e7e289ad5f6d621f2e73f9df306e35487f4183b6237d3672
|
#! /usr/bin/env python
from MDAnalysis import *
import numpy
import math
import sys
my_traj = sys.argv[1]

# Trajectory analysed against the shared topology.
# (An unused second ``Universe("init.pdb")`` load was removed.)
u = Universe("init.pdb", my_traj)

# Output files are named after the trajectory (extension stripped).
end = my_traj.find('.pdb')
fout_angle1 = my_traj[0:end] + '_betafloor_glob_angle1.dat'
fout_angle2 = my_traj[0:end] + '_betafloor_glob_angle2.dat'

# Beta floor (c) and the two globular regions (d, e).
c = u.selectAtoms("(segid A and resid 4:77) or (segid B and resid 1:90)")
d = u.selectAtoms("segid A and resid 78:182")
e = u.selectAtoms("segid B and resid 91:190")


def _fold(angle):
    """Fold an angle in degrees from [0, 180] into [0, 90].

    Principal axes have no direction, so an angle and its supplement
    are equivalent.
    """
    return 180 - angle if angle > 90 else angle


g = open(fout_angle1, 'w')
f = open(fout_angle2, 'w')
for ts in u.trajectory:
    # Principal axes of each selection for the current frame.
    c_axes = c.principalAxes()
    d_axes = d.principalAxes()
    e_axes = e.principalAxes()
    # Angles between corresponding principal axes of the two regions.
    angles1 = [_fold(math.degrees(math.acos(numpy.dot(ca, da))))
               for ca, da in zip(c_axes, d_axes)]
    angles2 = [_fold(math.degrees(math.acos(numpy.dot(ca, ea))))
               for ca, ea in zip(c_axes, e_axes)]
    g.write('%7.3f %7.3f %7.3f\n' % tuple(angles1))
    f.write('%7.3f %7.3f %7.3f\n' % tuple(angles2))
g.close()
f.close()
|
demharters/git_scripts
|
angle_betafloor_indiv_glob_mhcii.py
|
Python
|
apache-2.0
| 1,523
|
[
"MDAnalysis"
] |
5bb3dd63bd7ae43c55da434d3b01e0b67d4dacbb2ec5ef33303fe7e88ec55d91
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from pyscf import gto
from pyscf.geomopt import addons
class KnownValues(unittest.TestCase):
    # Regression tests for pyscf.geomopt.addons.

    def test_symmetrize(self):
        # symmetrize() must leave coordinates of an already-symmetric
        # molecule (C2v water) numerically unchanged.
        mol = gto.M(atom='''
            O 0. 0. 0.
            H 0. -0.757 0.587
            H 0. 0.757 0.587
            ''', symmetry=True)
        coords = mol.atom_coords()
        sym_coords = addons.symmetrize(mol, coords)
        self.assertAlmostEqual(abs(coords-sym_coords).max(), 0, 9)
if __name__ == "__main__":
    # Allow running this test module standalone.
    print("Tests for addons")
    unittest.main()
|
gkc1000/pyscf
|
pyscf/geomopt/test/test_addons.py
|
Python
|
apache-2.0
| 1,179
|
[
"PySCF"
] |
bb856028e34b8c878b4f981db254698d9e20daeaf2827bbd005dc5004e75126c
|
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import unittest_decorators as utx
import espressomd
@utx.skipIfMissingFeatures("LENNARD_JONES")
class TuneSkin(ut.TestCase):
    # Small, asymmetric box shared by all tests in this case.
    system = espressomd.System(box_l=[1.35, 2.4, 1.7])
    system.time_step = 0.01

    def setUp(self):
        # Short-range LJ interaction so that skin tuning has work to do.
        self.system.non_bonded_inter[0, 0].lennard_jones.set_params(
            epsilon=1,
            sigma=0.2,
            cutoff=0.3,
            shift="auto")

    def test_fails_without_adjustment(self):
        # max_skin exceeds what the small box allows, so tuning must
        # raise unless it is permitted to shrink the upper bound.
        with self.assertRaisesRegex(Exception, 'Error during tune_skin'):
            self.system.cell_system.tune_skin(
                min_skin=0.1,
                max_skin=0.6,
                tol=0.05,
                int_steps=3)

    def test_works_with_adjustment(self):
        # With adjust_max_skin=True the same call must succeed.
        self.system.cell_system.tune_skin(
            min_skin=0.1,
            max_skin=0.6,
            tol=0.05,
            int_steps=3,
            adjust_max_skin=True)
if __name__ == "__main__":
    # Run the test case directly.
    ut.main()
|
espressomd/espresso
|
testsuite/python/tune_skin.py
|
Python
|
gpl-3.0
| 1,674
|
[
"ESPResSo"
] |
4d87c0bc3a19d620a18aa1e8d886856f69e6e185fb516a4a2aaeb0a7b38c092e
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 Brian McClure
#
# django-octopus is free software under terms of the MIT License.
#
from setuptools import setup, find_packages
# NOTE(review): this module-level ``description`` is empty and unused;
# the ``description`` keyword passed to ``setup()`` below takes effect.
description = """"""
setup(
    name='django-octopus',
    version='0.4.1',
    packages=find_packages(),
    include_package_data=True,
    # NOTE(review): ``requires`` is legacy distutils metadata; pip reads
    # ``install_requires`` instead -- confirm before relying on it.
    requires=['python (>= 3.6)', 'django (>= 1.8)'],
    # NOTE(review): the implicit concatenation below yields
    # "...with fullfeatured demo" (no space between the fragments).
    description='A simple AJAX pull framework for django, now with full' \
        'featured demo',
    author='Brian McClure',
    author_email='brian@mcclure.pw',
    url='https://github.com/brmc/django-octopus',
    download_url='https://github.com/brmc/django-octopus.git',
    license='MIT License',
    keywords='django, ajax, front-end, pull',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
    ],
)
|
brmc/django-octopus
|
setup.py
|
Python
|
mit
| 985
|
[
"Brian",
"Octopus"
] |
8176ce79293dc38ce44c57067a8d946bc8c66d9d57c41a01fe7e3dd6903c0274
|
#!/usr/bin/python
"""
PyMOL plugin that provides show_contacts command and GUI
for highlighting good and bad polar contacts.
Factored out of clustermols by Matthew Baumgartner.
The advantage of this package is it requires many fewer dependencies.
Modified: Marcin Magnus 2020
Source <https://pymolwiki.org/index.php/Pymol-script-repo>
"""
from __future__ import print_function
import sys
import os
from pymol import cmd
# Banner printed once at import time, listing the distance groups that
# ``show_contacts`` creates.
print("""show_contacts
-------------------------------------
_polar: good polar interactions according to PyMOL
_polar_ok: compute possibly suboptimal polar interactions using the user specified distance
_aa: acceptors acceptors
_dd: donors donors
_all is all ;-) above!""")
# Module-wide verbosity level (higher values print more diagnostics).
DEBUG=1
def show_contacts(selection='*', selection2='*',
                  result="contacts",
                  cutoff=3.6,
                  bigcutoff = 4.0,
                  labels=False,
                  SC_DEBUG = DEBUG):
    """
    USAGE

    show_contacts selection, selection2, [result=contacts],[cutoff=3.6],[bigcutoff=4.0]

    Show various polar contacts, the good, the bad, and the ugly.

    Edit MPB 6-26-14: The distances are heavy atom distances, so I upped the default cutoff to 4.0

    Returns:
        True  - contacts were computed and grouped under ``result``
        False - a PyMOL selection error occurred
        None  - no contacts at all were found (nothing left to display)
    """
    if SC_DEBUG > 4:
        print('Starting show_contacts')
        print('selection = "' + selection + '"')
        print('selection2 = "' + selection2 + '"')

    result = cmd.get_legal_name(result)

    # if the group of contacts already exists, delete it first
    cmd.delete(result)

    # ensure only N and O atoms are in the selection
    all_don_acc1 = selection + " and (donor or acceptor)"
    all_don_acc2 = selection2 + " and (donor or acceptor)"

    if SC_DEBUG > 4:
        print('all_don_acc1 = "' + all_don_acc1 + '"')
        print('all_don_acc2 = "' + all_don_acc2 + '"')

    # If these selections turn out not to have any atoms in them, pymol throws
    # cryptic errors when calling the dist function like:
    #   'Selector-Error: Invalid selection name'
    # So for each one, manually perform the selection, pass the reference to
    # the distance command, and clean up the selections at the end.
    # The return values are the count of the number of atoms.
    all1_sele_count = cmd.select('all_don_acc1_sele', all_don_acc1)
    all2_sele_count = cmd.select('all_don_acc2_sele', all_don_acc2)

    # print out some warnings
    if DEBUG > 3:
        if not all1_sele_count:
            print('Warning: all_don_acc1 selection empty!')
        if not all2_sele_count:
            print('Warning: all_don_acc2 selection empty!')

    ########################################
    # All donor/acceptor pairs within bigcutoff (mode=0: plain distances).
    allres = result + "_all"
    if all1_sele_count and all2_sele_count:
        # Bug fix: renamed from `any`, which shadowed the builtin.
        contact_count = cmd.distance(allres, 'all_don_acc1_sele', 'all_don_acc2_sele', bigcutoff, mode=0)
        # if contact_count is 0 it seems that there is no distance!
        if contact_count:
            cmd.set("dash_radius", "0.05", allres)
            if not labels:
                cmd.hide("labels", allres)
        else:
            # nothing found at all: just clean up and bail out
            print('no contacts')
            cmd.delete('all_don_acc1_sele')
            cmd.delete('all_don_acc2_sele')
            cmd.delete(result + "_all")
            return None

    ########################################
    # compute good polar interactions according to pymol
    polres = result + "_polar"
    if all1_sele_count and all2_sele_count:
        cmd.distance(polres, 'all_don_acc1_sele', 'all_don_acc2_sele', cutoff, mode=2)  # mode=2 checks angles
        cmd.set("dash_radius", "0.2", polres)
        if not labels:
            cmd.hide("labels", polres)

    ########################################
    # When running distance in mode=2, the cutoff parameter is ignored if set
    # higher than the default of 3.6, so temporarily raise the h-bond cutoffs
    # to the passed-in value and restore them when done.
    old_h_bond_cutoff_center = cmd.get('h_bond_cutoff_center')  # ideal geometry
    old_h_bond_cutoff_edge = cmd.get('h_bond_cutoff_edge')  # minimally acceptable geometry
    cmd.set('h_bond_cutoff_center', bigcutoff)
    cmd.set('h_bond_cutoff_edge', bigcutoff)

    # compute possibly suboptimal polar interactions using the user specified distance
    pol_ok_res = result + "_polar_ok"
    if all1_sele_count and all2_sele_count:
        cmd.distance(pol_ok_res, 'all_don_acc1_sele', 'all_don_acc2_sele', bigcutoff, mode=2)
        cmd.set("dash_radius", "0.06", pol_ok_res)
        if not labels:
            cmd.hide("labels", pol_ok_res)

    # now reset the h_bond cutoffs
    cmd.set('h_bond_cutoff_center', old_h_bond_cutoff_center)
    cmd.set('h_bond_cutoff_edge', old_h_bond_cutoff_edge)

    ########################################
    # Clash detection: acceptor-acceptor and donor-donor pairs.
    onlyacceptors1 = selection + " and (acceptor and !donor)"
    onlyacceptors2 = selection2 + " and (acceptor and !donor)"
    onlydonors1 = selection + " and (!acceptor and donor)"
    onlydonors2 = selection2 + " and (!acceptor and donor)"

    # perform the selections
    onlyacceptors1_sele_count = cmd.select('onlyacceptors1_sele', onlyacceptors1)
    onlyacceptors2_sele_count = cmd.select('onlyacceptors2_sele', onlyacceptors2)
    onlydonors1_sele_count = cmd.select('onlydonors1_sele', onlydonors1)
    onlydonors2_sele_count = cmd.select('onlydonors2_sele', onlydonors2)

    # print out some warnings
    if SC_DEBUG > 2:
        if not onlyacceptors1_sele_count:
            print('Warning: onlyacceptors1 selection empty!')
        if not onlyacceptors2_sele_count:
            print('Warning: onlyacceptors2 selection empty!')
        if not onlydonors1_sele_count:
            print('Warning: onlydonors1 selection empty!')
        if not onlydonors2_sele_count:
            print('Warning: onlydonors2 selection empty!')

    # acceptors acceptors
    accres = result + "_aa"
    if onlyacceptors1_sele_count and onlyacceptors2_sele_count:
        aa_dist_out = cmd.distance(accres, 'onlyacceptors1_sele', 'onlyacceptors2_sele', cutoff, 0)
        # a negative return indicates a pymol selection error
        if aa_dist_out < 0:
            print('\n\nCaught a pymol selection error in acceptor-acceptor selection of show_contacts')
            print('accres:', accres)
            print('onlyacceptors1', onlyacceptors1)
            print('onlyacceptors2', onlyacceptors2)
            return False
        cmd.set("dash_color", "red", accres)
        cmd.set("dash_radius", "0.125", accres)
        if not labels:
            cmd.hide("labels", accres)

    # donors donors
    donres = result + "_dd"
    if onlydonors1_sele_count and onlydonors2_sele_count:
        dd_dist_out = cmd.distance(donres, 'onlydonors1_sele', 'onlydonors2_sele', cutoff, 0)
        # try to catch the error state
        if dd_dist_out < 0:
            print('\n\nCaught a pymol selection error in dd selection of show_contacts')
            print('donres:', donres)
            print('onlydonors1', onlydonors1)
            print('onlydonors2', onlydonors2)
            print("cmd.distance('" + donres + "', '" + onlydonors1 + "', '" + onlydonors2 + "', " + str(cutoff) + ", 0)")
            return False
        cmd.set("dash_color", "red", donres)
        cmd.set("dash_radius", "0.125", donres)
        if not labels:
            cmd.hide("labels", donres)

    # Placeholder kept from cluster_mols' buried-unpaired-atoms feature
    # (initialize the variable for when CALC_SASA is False).
    unpaired_atoms = ''

    ## Group the per-category distance objects under `result`.
    print(allres)
    cmd.group(result, "%s %s %s %s %s %s" % (polres, allres, accres, donres, pol_ok_res, unpaired_atoms))

    ## Clean up the selection objects.
    # If the show_contacts debug level is high enough, don't delete them.
    if SC_DEBUG < 5:
        cmd.delete('all_don_acc1_sele')
        cmd.delete('all_don_acc2_sele')
        cmd.delete('onlyacceptors1_sele')
        cmd.delete('onlyacceptors2_sele')
        cmd.delete('onlydonors1_sele')
        cmd.delete('onlydonors2_sele')

    # Bug fix: disable the members of *this* result group.  The original
    # hard-coded the default 'contacts_*' names, which broke whenever a
    # custom `result` name was supplied.
    cmd.disable(allres)
    cmd.disable(pol_ok_res)
    cmd.disable(accres)
    cmd.disable(donres)

    return True

cmd.extend('contacts', show_contacts)  # 'contacts' to avoid clashing with cluster_mols version
#################################################################################
########################### Start of pymol plugin code ##########################
#################################################################################

# Shown in the plugin "about" dialog; provenance of this module.
about_text = '''show_contacts was factored out of the much more full-featured cluster_mols
by Dr. Matt Baumgartner (https://pymolwiki.org/index.php/Cluster_mols). It provides
an easy way to highlight polar contacts (and clashes) between two selections without
requiring the installation of additional dependencies.
'''
class Show_Contacts:
    ''' Tk version of the Plugin GUI '''

    def __init__(self, app):
        # `app` is the legacy PyMOL plugin host; its `root` Tk widget is our parent.
        parent = app.root
        self.parent = parent
        self.app = app
        # Pmw is imported lazily so the module loads even without the Tk stack.
        import Pmw

        ############################################################################################
        ### Open a window with options to select to loaded objects                               ###
        ############################################################################################
        self.select_dialog = Pmw.Dialog(parent,
                                        buttons = ('Ok','Cancel'),
                                        title = 'Show Contacts Plugin',
                                        command = self.button_pressed )
        self.select_dialog.withdraw()

        # allow the user to select from objects already loaded in pymol
        self.select_object_combo_box = Pmw.ComboBox(self.select_dialog.interior(),
                                                    scrolledlist_items=[],
                                                    labelpos='w',
                                                    label_text='Select loaded object:',
                                                    listbox_height = 2,
                                                    dropdown=True)
        self.select_object_combo_box2 = Pmw.ComboBox(self.select_dialog.interior(),
                                                     scrolledlist_items=[],
                                                     labelpos='w',
                                                     label_text='Select loaded object:',
                                                     listbox_height = 2,
                                                     dropdown=True)
        self.select_object_combo_box.grid(column=1, row=0)
        self.select_object_combo_box2.grid(column=2, row=0)
        self.populate_ligand_select_list()
        self.select_dialog.show()

    def button_pressed(self, result):
        # Dialog callback: `result` is a button label string, or a key event
        # object when triggered from the keyboard.
        if hasattr(result,'keycode'):
            # NOTE(review): 36 looks like the X11 keycode for Return — confirm
            # on other platforms.
            if result.keycode == 36:
                print('keycode:', result.keycode)
        elif result == 'Ok' or result == 'Exit' or result == None:
            # Run show_contacts on the two chosen selections; the result group
            # is named "<sel1>_<sel2>".
            s1 = self.select_object_combo_box.get()
            s2 = self.select_object_combo_box2.get()
            show_contacts(s1,s2,'%s_%s'%(s1,s2))
            self.select_dialog.withdraw()
        elif result == 'Cancel' or result == None:
            # NOTE(review): `result == None` is unreachable here (caught above).
            self.select_dialog.withdraw()

    def populate_ligand_select_list(self):
        ''' Go thourgh the loaded objects in PyMOL and add them to the selected list. '''
        # get the loaded objects
        loaded_objects = _get_select_list()

        self.select_object_combo_box.clear()
        self.select_object_combo_box2.clear()

        for ob in loaded_objects:
            self.select_object_combo_box.insert('end', ob)
            self.select_object_combo_box2.insert('end', ob)
def _get_select_list():
    '''
    Get either a list of object names, or a list of chain selections
    '''
    # Loaded objects, excluding cluster_mols' internal "_cluster_" objects.
    names = [obj for obj in cmd.get_names('all', 1) if '_cluster_' not in obj]

    # With exactly one multi-chain object loaded, offer per-chain selections
    # instead of the bare object name.
    if len(names) == 1:
        chain_ids = cmd.get_chains(names[0])
        if len(chain_ids) > 1:
            names = ['{} & chain {}'.format(names[0], chain_id)
                     for chain_id in chain_ids]

    return names
class Show_Contacts_Qt_Dialog(object):
    ''' Qt version of the Plugin GUI '''

    def __init__(self):
        # Build the dialog, fill the combo boxes, and run it modally.
        from pymol.Qt import QtWidgets
        dialog = QtWidgets.QDialog()
        self.setupUi(dialog)
        self.populate_ligand_select_list()
        dialog.accepted.connect(self.accept)
        dialog.exec_()

    def accept(self):
        # "Ok" pressed: compute contacts between the two chosen selections;
        # the result group is named "<sel1>_<sel2>".
        s1 = self.select_object_combo_box.currentText()
        s2 = self.select_object_combo_box2.currentText()
        show_contacts(s1, s2, '%s_%s' % (s1, s2))

    def populate_ligand_select_list(self):
        # Fill both combo boxes with the loaded objects (or chain selections),
        # pre-selecting a different default in each when possible.
        loaded_objects = _get_select_list()
        self.select_object_combo_box.clear()
        self.select_object_combo_box2.clear()
        self.select_object_combo_box.addItems(loaded_objects)
        self.select_object_combo_box2.addItems(loaded_objects)
        if len(loaded_objects) > 1:
            self.select_object_combo_box2.setCurrentIndex(1)

    def setupUi(self, Dialog):
        # Based on auto-generated code from ui file
        from pymol.Qt import QtCore, QtWidgets
        Dialog.resize(400, 50)
        self.gridLayout = QtWidgets.QGridLayout(Dialog)
        label = QtWidgets.QLabel("Select loaded object:", Dialog)
        self.gridLayout.addWidget(label, 0, 0, 1, 1)
        self.select_object_combo_box = QtWidgets.QComboBox(Dialog)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
        self.select_object_combo_box.setSizePolicy(sizePolicy)
        # Editable so the user can type arbitrary selection expressions.
        self.select_object_combo_box.setEditable(True)
        self.gridLayout.addWidget(self.select_object_combo_box, 0, 1, 1, 1)
        label = QtWidgets.QLabel("Select loaded object:", Dialog)
        self.gridLayout.addWidget(label, 1, 0, 1, 1)
        self.select_object_combo_box2 = QtWidgets.QComboBox(Dialog)
        self.select_object_combo_box2.setSizePolicy(sizePolicy)
        self.select_object_combo_box2.setEditable(True)
        self.gridLayout.addWidget(self.select_object_combo_box2, 1, 1, 1, 1)
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.gridLayout.addWidget(self.buttonBox, 2, 0, 1, 2)
        self.buttonBox.accepted.connect(Dialog.accept)
        self.buttonBox.rejected.connect(Dialog.reject)
def __init__(self):
    """
    Legacy PyMOL plugin entry point.

    PyMOL calls this module-level ``__init__`` with the plugin host as
    ``self``.  Prefer the Qt menu item when available; fall back to the
    Tk ``menuBar`` registration otherwise.
    """
    try:
        from pymol.plugins import addmenuitemqt
        addmenuitemqt('Show Contacts', Show_Contacts_Qt_Dialog)
        return
    except Exception as e:
        # Qt unavailable (older PyMOL): report and use the Tk path below.
        print(e)
    self.menuBar.addmenuitem('Plugin', 'command', 'Show Contacts', label = 'Show Contacts', command = lambda s=self : Show_Contacts(s))
|
m4rx9/rna-pdb-tools
|
rna_tools/tools/PyMOL4RNA/libs/show_contacts.py
|
Python
|
mit
| 15,707
|
[
"PyMOL"
] |
fbbcee215b72c66bf78fb27a321046c7b5abcfe60bfce12c60d73bd92a177b24
|
#!/usr/bin/env python
########################################################################
# $HeadURL$
# File : dirac-admin-sort-cs-sites
# Author : Matvey Sapunov
########################################################################
"""
Sort site names at CS in "/Resources" section. Sort can be alphabetic or by country postfix in a site name.
Alphabetic sort is default (i.e. LCG.IHEP.cn, LCG.IHEP.su, LCG.IN2P3.fr)
Options:
-C --country Sort site names by country postfix (i.e. LCG.IHEP.cn, LCG.IN2P3.fr, LCG.IHEP.su)
-R --reverse Reverse the sort order
Argument:
Name of the subsection in the CS '/Resources/Sites/' section to be sorted (i.e. LCG, DIRAC)
Example: dirac-admin-sort-cs-sites -C DIRAC
Sort sites in subsection /Resources/Sites/DIRAC by country postfix
"""
__RCSID__ = "$Id$"
from DIRAC import gLogger, exit as DIRACExit
from DIRAC.Core.Base import Script
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getPropertiesForGroup
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.Core.Utilities.Time import dateTime , toString
# Module-level sort configuration, toggled by the command-line switch
# callbacks below (sortBy / isReverse).
global SORTBYNAME, REVERSE
SORTBYNAME = True   # True: alphabetic sort; False: sort by country postfix
REVERSE = False     # True: reverse the chosen sort order
def sortBy( arg ):
  """Switch callback for -C/--country: select country-postfix ordering
  instead of the default alphabetic sort.  ``arg`` is ignored."""
  global SORTBYNAME
  SORTBYNAME = False
def isReverse( arg ):
  """Switch callback for -R/--reverse: reverse the sort order.
  ``arg`` is ignored."""
  global REVERSE
  REVERSE = True
def country( arg ):
  """Sort-key helper: return the country postfix of a site name in
  GRID.NAME.COUNTRY format (e.g. 'LCG.IN2P3.fr' -> 'fr'), or False if
  the name does not have exactly three dot-separated components."""
  cb = arg.split( "." )
  if not len( cb ) == 3:
    # Bug fix: the original logged the literal "%s" — the placeholder was
    # never filled with the offending site name.
    gLogger.error( "%s is not in GRID.NAME.COUNTRY format " % arg )
    return False
  return cb[ 2 ]
# --- Command-line setup -----------------------------------------------------
Script.registerSwitch( "C", "country", "Sort site names by country postfix (i.e. LCG.IHEP.cn, LCG.IN2P3.fr, LCG.IHEP.su)" , sortBy )
Script.registerSwitch( "R", "reverse", "Reverse the sort order" , isReverse )
Script.setUsageMessage( "\n".join( [ __doc__.split( "\n" )[ 1 ]
                                     ,"Usage:"
                                     ," %s [option|cfgfile] <Section>" % Script.scriptName
                                     ,"Optional arguments:"
                                     ," Section: Name of the subsection in '/Resources/Sites/' for sort (i.e. LCG DIRAC)"
                                     ,"Example:"
                                     ," dirac-admin-sort-cs-sites -C CLOUDS DIRAC"
                                     ," sort site names by country postfix in '/Resources/Sites/CLOUDS' and '/Resources/Sites/DIRAC' subsection"
                                     ,"" ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

# --- Authorization: a live proxy with the CSAdministrator property is
# --- required before any CS modification is attempted.
result = getProxyInfo()
if not result[ "OK" ]:
  gLogger.error( "Failed to get proxy information", result[ "Message" ] )
  DIRACExit( 2 )
proxy = result[ "Value" ]
if proxy[ "secondsLeft" ] < 1:
  gLogger.error( "Your proxy has expired, please create new one" )
  DIRACExit( 2 )
group = proxy[ "group" ]
if not "CSAdministrator" in getPropertiesForGroup( group ):
  gLogger.error( "You must be CSAdministrator user to execute this script" )
  gLogger.notice( "Please issue 'dirac-proxy-init -g [group with CSAdministrator Property]'" )
  DIRACExit( 2 )

# --- Fetch a working copy of the CS and sanity-check its structure.
cs = CSAPI()
result = cs.getCurrentCFG()
if not result[ "OK" ]:
  gLogger.error( "Failed to get copy of CS", result[ "Message" ] )
  DIRACExit( 2 )
cfg = result[ "Value" ]
if not cfg.isSection( "Resources" ):
  gLogger.error( "Section '/Resources' is absent in CS" )
  DIRACExit( 2 )
if not cfg.isSection( "Resources/Sites" ):
  gLogger.error( "Subsection '/Resources/Sites' is absent in CS" )
  DIRACExit( 2 )

# Sort only the subsections named on the command line, or all of them.
if args and len( args ) > 0:
  resultList = args[ : ]
else:
  resultList = cfg[ "Resources" ][ "Sites" ].listSections()

# --- Sort each requested subsection in place; track whether anything changed.
hasRun = False
isDirty = False
for i in resultList:
  if not cfg.isSection( "Resources/Sites/%s" % i ):
    gLogger.error( "Subsection /Resources/Sites/%s does not exists" % i )
    continue
  hasRun = True
  if SORTBYNAME:
    dirty = cfg[ "Resources" ][ "Sites" ][ i ].sortAlphabetically( ascending = not REVERSE )
  else:
    dirty = cfg[ "Resources" ][ "Sites" ][ i ].sortByKey( key = country , reverse = REVERSE )
  if dirty:
    isDirty = True

if not hasRun:
  gLogger.notice( "Failed to find suitable subsections with site names to sort" )
  DIRACExit( 0 )

if not isDirty:
  gLogger.notice( "Nothing to do, site names are already sorted" )
  DIRACExit( 0 )

# --- Commit: stamp the change with the script name and time, then push.
timestamp = toString( dateTime() )
stamp = "Site names are sorted by %s script at %s" % ( Script.scriptName , timestamp )
cs.setOptionComment( "/Resources/Sites" , stamp )
result = cs.commit()
if not result[ "OK" ]:
  gLogger.error( "Failed to commit changes to CS", result[ "Message" ] )
  DIRACExit( 2 )
gLogger.notice( "Site names are sorted and committed to CS" )
DIRACExit( 0 )
|
sposs/DIRAC
|
Core/scripts/dirac-admin-sort-cs-sites.py
|
Python
|
gpl-3.0
| 4,921
|
[
"DIRAC"
] |
8018b5d582afab527f6eff340cfbaf2d670ab5e40238fa505ebe90477d76d179
|
from galaxy.web.base.controller import *
class Mobile( BaseUIController ):
    """Controller serving the mobile (mako-templated) Galaxy UI."""

    @web.expose
    def index( self, trans, **kwargs ):
        # Mobile landing page.
        return trans.fill_template( "mobile/index.mako" )

    @web.expose
    def history_list( self, trans ):
        # List of the current user's histories.
        return trans.fill_template( "mobile/history/list.mako" )

    @web.expose
    def history_detail( self, trans, id ):
        # Show one history; the assert enforces ownership (raises otherwise).
        history = trans.sa_session.query( trans.app.model.History ).get( id )
        assert history.user == trans.user
        return trans.fill_template( "mobile/history/detail.mako", history=history )

    @web.expose
    def dataset_detail( self, trans, id ):
        # Show one dataset; ownership is checked via its parent history.
        dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
        assert dataset.history.user == trans.user
        return trans.fill_template( "mobile/dataset/detail.mako", dataset=dataset )

    @web.expose
    def dataset_peek( self, trans, id ):
        # Render the dataset's "peek" (content preview); same ownership check.
        dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
        assert dataset.history.user == trans.user
        return trans.fill_template( "mobile/dataset/peek.mako", dataset=dataset )

    @web.expose
    def settings( self, trans, email=None, password=None ):
        """Login/logout form: an empty email logs out; otherwise attempt login."""
        message = None
        if email is not None and password is not None:
            if email == "":
                self.__logout( trans )
                message = "Logged out"
            else:
                error = self.__login( trans, email, password )
                message = error or "Login changed"
        return trans.fill_template( "mobile/settings.mako", message=message )

    def __logout( self, trans ):
        trans.log_event( "User logged out" )
        trans.handle_user_logout()

    def __login( self, trans, email="", password="" ):
        """Attempt a login; return an error message string, or None on success."""
        error = password_error = None
        # NOTE(review): uses bare `model.User` (from the star import) where the
        # other methods use `trans.app.model` — presumably the same model;
        # confirm.
        user = trans.sa_session.query( model.User ).filter_by( email = email ).first()
        if not user:
            error = "No such user (please note that login is case sensitive)"
        elif user.deleted:
            error = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
        elif user.external:
            error = "This account was created for use with an external authentication method, contact your local Galaxy administrator to activate it."
        elif not user.check_password( password ):
            error = "Invalid password"
        else:
            trans.handle_user_login( user )
            trans.log_event( "User logged in" )
        return error
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/webapps/galaxy/controllers/mobile.py
|
Python
|
gpl-3.0
| 2,554
|
[
"Galaxy"
] |
f596e961a3d8ddc98dac7161af634a52d317db8956861e7fdd6a676b50df79e6
|
#-------------------------------------------------------------------------------
# Cloud-COPASI
# Copyright (c) 2013 Edward Kent.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Public License v3.0
# which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/gpl.html
#-------------------------------------------------------------------------------
from django.http import HttpResponse, HttpResponseForbidden, HttpResponseServerError
from django.views.generic import TemplateView, RedirectView, View, FormView
from django.views.generic.edit import FormMixin, ProcessFormView
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse_lazy
from django import forms
from cloud_copasi.web_interface.views import RestrictedView, DefaultView, RestrictedFormView
from cloud_copasi.web_interface.models import AWSAccessKey, VPCConnection, CondorPool, EC2Instance
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required, permission_required
import sys
from django.contrib.auth.forms import PasswordChangeForm
from cloud_copasi.web_interface.aws import vpc_tools, aws_tools, ec2_tools,\
resource_management_tools
from cloud_copasi.web_interface import models
from boto.exception import EC2ResponseError, BotoServerError
from cloud_copasi.web_interface.models import VPC
import logging
log = logging.getLogger(__name__)
class ResourceOverviewView(RestrictedView):
    """View to display active compute pools
    """
    template_name = 'account/resource_overview.html'
    page_title = 'AWS Resource Overview'

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # Get the list of resources for every key the user owns, plus keys
        # shared with them (copy_of).
        keys = AWSAccessKey.objects.filter(user=request.user) | AWSAccessKey.objects.filter(copy_of__user=request.user)
        overview=[]
        for key in keys:
            # Build (key, recognized, unrecognized) triples for the template.
            recognized_resources=resource_management_tools.get_recognized_resources(user=request.user, key=key)
            unrecognized_resources = resource_management_tools.get_unrecognized_resources(user=request.user,key=key)
            overview.append((key, recognized_resources, unrecognized_resources))
        kwargs['overview'] = overview
        return super(ResourceOverviewView, self).dispatch(request, *args, **kwargs)
class ResourceTerminateView(RestrictedView):
    """Confirmation + execution view for terminating unrecognized AWS
    resources, either for a single access key or for all keys."""
    page_title = 'Confirm termination of AWS resources'
    template_name = 'account/resource_terminate.html'

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # Termination can be slow, so the template shows a loading screen.
        kwargs['show_loading_screen'] = True
        kwargs['loading_title'] = 'Terminating resources'
        kwargs['loading_description'] = 'Please be patient and do not navigate away from this page. Terminating resources can take several minutes'

        # 'all' means every key; otherwise look up the one key and make sure
        # it belongs to the requesting user (assert raises otherwise).
        if kwargs['key_id'] == 'all':
            resources = resource_management_tools.get_unrecognized_resources(request.user)
        else:
            key_id = kwargs['key_id']
            key=AWSAccessKey.objects.get(id=key_id)
            assert key.user == request.user
            resources = resource_management_tools.get_unrecognized_resources(request.user, key)

        # Only terminate after the user has confirmed (second visit).
        if kwargs['confirmed']:
            resource_management_tools.terminate_resources(request.user, resources)
            return HttpResponseRedirect(reverse_lazy('my_account'))

        kwargs['resources'] = resources
        return super(ResourceTerminateView, self).dispatch(request, *args, **kwargs)
|
edkent/cloud-copasi
|
cloud_copasi/web_interface/aws/resource_views.py
|
Python
|
gpl-3.0
| 3,715
|
[
"COPASI"
] |
69c141d8562887e85f0e62a7bb890cde6a077fd099f408be25a4d925682de8b6
|
# (C) British Crown Copyright 2010 - 2016, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Provides an interface to manage URI scheme support in iris.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
import glob
import os.path
import types
import re
import collections
import iris.fileformats
import iris.cube
import iris.exceptions
# Saving routines, indexed by file extension.
class _SaversDict(dict):
    """A dictionary that can only have string keys with no overlap.

    "Overlap" means one key being a suffix of another (e.g. 'png' and
    'dotpng'), which would make extension matching ambiguous.
    """
    def __setitem__(self, key, value):
        if not isinstance(key, six.string_types):
            raise ValueError("key is not a string")
        if key in self:
            raise ValueError("A saver already exists for", key)
        # Reject keys that are a suffix of (or have as suffix) any existing key.
        for existing in self:
            overlaps = existing.endswith(key) or key.endswith(existing)
            if overlaps:
                raise ValueError("key %s conflicts with existing key %s" % (key, existing))
        dict.__setitem__(self, key, value)
# Global registry of save functions, keyed by file extension (see add_saver);
# lazily populated with the built-in savers by _check_init_savers.
_savers = _SaversDict()
def run_callback(callback, cube, field, filename):
    """
    Runs the callback mechanism given the appropriate arguments.

    Args:

    * callback:
        A function to add metadata from the originating field and/or URI which
        obeys the following rules:

        1. Function signature must be: ``(cube, field, filename)``.
        2. Modifies the given cube inplace, unless a new cube is
           returned by the function.
        3. If the cube is to be rejected the callback must raise
           an :class:`iris.exceptions.IgnoreCubeException`.

    .. note::

        It is possible that this function returns None for certain callbacks,
        the caller of this function should handle this case.

    """
    if callback is None:
        return cube

    # The callback usually mutates the cube in place, but it may also return
    # a brand-new cube, or reject the cube entirely via IgnoreCubeException.
    try:
        outcome = callback(cube, field, filename)
    except iris.exceptions.IgnoreCubeException:
        return None

    if outcome is None:
        # In-place modification: hand back the original cube.
        return cube
    if not isinstance(outcome, iris.cube.Cube):
        raise TypeError("Callback function returned an "
                        "unhandled data type.")
    return outcome
def decode_uri(uri, default='file'):
    r'''
    Decodes a single URI into scheme and scheme-specific parts.

    In addition to well-formed URIs, it also supports bare file paths;
    both Windows and UNIX style paths are accepted, and anything without
    a recognisable scheme falls back to *default*.

    Examples::

        decode_uri('http://host/x')    -> ('http', '//host/x')
        decode_uri('/data/local/x')    -> ('file', '/data/local/x')
        decode_uri('C:\data\x')        -> ('file', 'C:\\data\\x')

    '''
    # A scheme needs at least 2 leading letters so single-letter Windows
    # drive prefixes ("C:") are treated as paths, not schemes.
    # '-' is placed last inside the class so it is a literal, not a range.
    # Valid scheme characters: http://tools.ietf.org/html/std66#section-3.1
    scheme_pattern = re.compile(r"^([a-zA-Z][a-zA-Z0-9+.-]+):(.+)")

    found = scheme_pattern.match(uri)
    if found is None:
        # Bare UNIX or Windows path.
        return default, uri
    return found.group(1), found.group(2)
def expand_filespecs(file_specs):
    """
    Find all matching file paths from a list of file-specs.

    Args:

    * file_specs (iterable of string):
        File paths which may contain '~' elements or wildcards.

    Returns:
        A list of matching file paths. If any of the file-specs matches no
        existing files, an exception is raised.

    """
    # Remove any hostname component - currently unused - then expand '~'.
    paths = []
    for spec in file_specs:
        trimmed = spec[2:] if spec.startswith('//') else spec
        paths.append(os.path.expanduser(trimmed))

    # Expand every path as a glob, remembering which spec produced what.
    glob_expanded = {}
    for path in paths:
        glob_expanded[path] = sorted(glob.glob(path))

    # If any of the specs expanded to an empty list then raise an error.
    if not all(glob_expanded.values()):
        raise IOError("One or more of the files specified did not exist %s." %
                      ["%s expanded to %s" % (pattern, expanded if expanded else "empty")
                       for pattern, expanded in six.iteritems(glob_expanded)])

    # Flatten the per-spec match lists into a single list.
    matched = []
    for file_list in glob_expanded.values():
        matched.extend(file_list)
    return matched
def load_files(filenames, callback, constraints=None):
    """
    Takes a list of filenames which may also be globs, and optionally a
    constraint set and a callback function, and returns a
    generator of Cubes from the given files.

    .. note::

        Typically, this function should not be called directly; instead, the
        intended interface for loading is :func:`iris.load`.

    """
    all_file_paths = expand_filespecs(filenames)

    # Create default dict mapping iris format handler to its associated filenames
    handler_map = collections.defaultdict(list)
    for fn in all_file_paths:
        with open(fn, 'rb') as fh:
            # Sniff the file to determine which format handler owns it.
            handling_format_spec = iris.fileformats.FORMAT_AGENT.get_spec(os.path.basename(fn), fh)
            handler_map[handling_format_spec].append(fn)

    # Call each iris format handler with the approriate filenames.
    # Sorted so that cubes come out in a deterministic handler order.
    for handling_format_spec in sorted(handler_map):
        fnames = handler_map[handling_format_spec]
        if handling_format_spec.constraint_aware_handler:
            # Handler can filter by constraints itself (more efficient).
            for cube in handling_format_spec.handler(fnames, callback,
                                                     constraints):
                yield cube
        else:
            for cube in handling_format_spec.handler(fnames, callback):
                yield cube
def load_http(urls, callback):
    """
    Takes a list of urls and a callback function, and returns a generator
    of Cubes from the given URLs.

    .. note::

        Typically, this function should not be called directly; instead, the
        intended interface for loading is :func:`iris.load`.

    """
    # Create default dict mapping iris format handler to its associated filenames.
    # The spec is determined from the URL alone (no file handle to sniff).
    handler_map = collections.defaultdict(list)
    for url in urls:
        handling_format_spec = iris.fileformats.FORMAT_AGENT.get_spec(url, None)
        handler_map[handling_format_spec].append(url)

    # Call each iris format handler with the appropriate filenames,
    # in deterministic (sorted) handler order.
    for handling_format_spec in sorted(handler_map):
        fnames = handler_map[handling_format_spec]
        for cube in handling_format_spec.handler(fnames, callback):
            yield cube
def _dot_save(cube, target):
    # A simple wrapper for `iris.fileformats.dot.save` which allows the
    # saver to be registered without triggering the import of
    # `iris.fileformats.dot`.
    import iris.fileformats.dot
    return iris.fileformats.dot.save(cube, target)
def _dot_save_png(cube, target, **kwargs):
    # A simple wrapper for `iris.fileformats.dot.save_png` which allows the
    # saver to be registered without triggering the import of
    # `iris.fileformats.dot`.
    import iris.fileformats.dot
    return iris.fileformats.dot.save_png(cube, target, **kwargs)
def _grib_save(cube, target, append=False, **kwargs):
    # A simple wrapper for the grib save routine, which allows the saver to be
    # registered without having the grib implementation installed.
    try:
        # Prefer the standalone `iris-grib` package when available.
        import iris_grib as igrib
    except ImportError:
        try:
            # `gribapi` is required by the bundled fallback as well; if it is
            # missing there is no way to save GRIB at all.
            import gribapi
        except ImportError:
            raise RuntimeError('Unable to save GRIB file - the ECMWF '
                               '`gribapi` package is not installed.')
        from iris.fileformats import grib as igrib
    return igrib.save_grib2(cube, target, append, **kwargs)
def _check_init_savers():
    # Lazily populate the `_savers` registry with the built-in savers on
    # first use (the "pp" key doubles as the initialized-yet sentinel).
    # TODO: Raise a ticket to resolve the cyclic import error that requires
    # us to initialise this on first use. Probably merge io and fileformats.
    if "pp" not in _savers:
        _savers.update({"pp": iris.fileformats.pp.save,
                        "nc": iris.fileformats.netcdf.save,
                        "dot": _dot_save,
                        "dotpng": _dot_save_png,
                        "grib2": _grib_save})
def add_saver(file_extension, new_saver):
    """
    Add a custom saver to the Iris session.

    Args:

    * file_extension - A string such as "pp" or "my_format".
    * new_saver - A function of the form ``my_saver(cube, target)``.

    See also :func:`iris.io.save`

    """
    # Reject anything that is not callable with at least two arguments.
    # (The short-circuit keeps us from touching __code__ on non-callables.)
    if not hasattr(new_saver, "__call__") or new_saver.__code__.co_argcount < 2:
        raise ValueError("Saver routines must be callable with 2+ arguments.")

    # Register it; _SaversDict rejects invalid or overlapping keys itself.
    _savers[file_extension] = new_saver
def find_saver(filespec):
    """
    Find the saver function appropriate to the given filename or extension.

    Args:

    * filespec - A string such as "my_file.pp" or "PP".

    Returns:
        A save function or None.
        Save functions can be passed to :func:`iris.io.save`.

    """
    _check_init_savers()

    lowered = filespec.lower()
    matches = [ext for ext in _savers
               if lowered.endswith('.' + ext) or lowered == ext]

    # Multiple matches could occur if one of the savers included a '.':
    # e.g. _savers = {'.dot.png': dot_png_saver, '.png': png_saver}
    if len(matches) > 1:
        fmt = "Multiple savers found for %r: %s"
        raise ValueError(fmt % (filespec, ', '.join(map(repr, matches))))

    if not matches:
        return None
    return _savers[matches[0]]
def save(source, target, saver=None, **kwargs):
    """
    Save one or more Cubes to file (or other writable).

    Iris currently supports three file formats for saving, which it can
    recognise by filename extension:

    * netCDF - the Unidata network Common Data Format:
        * see :func:`iris.fileformats.netcdf.save`
    * GRIB2 - the WMO GRIdded Binary data format;
        * see <https://github.com/SciTools/iris-grib>.
    * PP - the Met Office UM Post Processing Format.
        * see :func:`iris.fileformats.pp.save`

    A custom saver can be provided to the function to write to a different
    file format.

    Args:

    * source - A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or
      sequence of cubes.
    * target - A filename (or writable, depending on file format).
      When given a filename or file, Iris can determine the
      file format.

    Kwargs:

    * saver - Optional. Specifies the save function to use.
      If omitted, Iris will attempt to determine the format.
      This keyword can be used to implement a custom save
      format. Function form must be:
      ``my_saver(cube, target)`` plus any custom keywords. It
      is assumed that a saver will accept an ``append`` keyword
      if its file format can handle multiple cubes. See also
      :func:`iris.io.add_saver`.

    All other keywords are passed through to the saver function; see the
    relevant saver documentation for more information on keyword arguments.

    Examples::

        # Save a cube to PP
        iris.save(my_cube, "myfile.pp")

        # Save a cube list to a PP file, appending to the contents of the file
        # if it already exists
        iris.save(my_cube_list, "myfile.pp", append=True)

        # Save a cube to netCDF, defaults to NETCDF4 file format
        iris.save(my_cube, "myfile.nc")

        # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option
        iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC")

    .. warning::

        Saving a cube whose data has been loaded lazily
        (if `cube.has_lazy_data()` returns `True`) to the same file it expects
        to load data from will cause both the data in-memory and the data on
        disk to be lost.

        .. code-block:: python

            cube = iris.load_cube('somefile.nc')
            # The next line causes data loss in 'somefile.nc' and the cube.
            iris.save(cube, 'somefile.nc')

        In general, overwriting a file which is the source for any lazily loaded
        data can result in corruption. Users should proceed with caution when
        attempting to overwrite an existing file.
    """
    # Determine the save function from the filename (or file-like object's
    # name), unless the caller supplied one explicitly (possibly by name).
    if isinstance(target, six.string_types) and saver is None:
        saver = find_saver(target)
    elif hasattr(target, 'name') and saver is None:
        saver = find_saver(target.name)
    elif isinstance(saver, six.string_types):
        saver = find_saver(saver)
    if saver is None:
        raise ValueError("Cannot save; no saver")
    # Single cube?
    if isinstance(source, iris.cube.Cube):
        saver(source, target, **kwargs)
    # CubeList or sequence of cubes?
    elif (isinstance(source, iris.cube.CubeList) or
          (isinstance(source, (list, tuple)) and
           all(isinstance(i, iris.cube.Cube) for i in source))):
        # Only allow cubelist saving for those fileformats that are capable.
        if 'iris.fileformats.netcdf' not in saver.__module__:
            # Make sure the saver accepts an append keyword.
            if "append" not in saver.__code__.co_varnames:
                raise ValueError("Cannot append cubes using saver function "
                                 "'%s' in '%s'" %
                                 (saver.__code__.co_name,
                                  saver.__code__.co_filename))
            # Force append=True for the tail cubes. Don't modify the incoming
            # kwargs.
            kwargs = kwargs.copy()
            for i, cube in enumerate(source):
                if i != 0:
                    kwargs['append'] = True
                saver(cube, target, **kwargs)
        # Netcdf saver handles a whole CubeList in one call.
        else:
            saver(source, target, **kwargs)
    else:
        raise ValueError("Cannot save; non Cube found in source")
|
jswanljung/iris
|
lib/iris/io/__init__.py
|
Python
|
lgpl-3.0
| 15,376
|
[
"NetCDF"
] |
92a6cf6f3ca65c573c5f25c6f30b634f2ae9e6b0dd551f2f2cc78412c03cd718
|
import csv,os,os.path,sys
from time import strftime
from collections import OrderedDict
import argparse
import dotenv
import math
from constants import OMOP_CONSTANTS, OMOP_MAPPING_RECORD, BENEFICIARY_SUMMARY_RECORD, OMOP_CONCEPT_RECORD, OMOP_CONCEPT_RELATIONSHIP_RECORD
from utility_classes import Table_ID_Values
from beneficiary import Beneficiary
from FileControl import FileControl
from SynPufFiles import PrescriptionDrug, InpatientClaim, OutpatientClaim, CarrierClaim
from datetime import date
import calendar
# ------------------------
# TODO: polish for updating to OHDSI (doc strings, testing, comments, pylint, etc)
#
# ------------------------
# ------------------------
# This python script creates the OMOP CDM v5 tables from the CMS SynPuf (Synthetic Public Use Files).
# ------------------------
#
# Input Required:
# OMOP Vocabulary v5 Concept file. Remember to run: java -jar cpt4.jar (appends CPT4 concepts from concept_cpt4.csv to CONCEPT.csv)
# BASE_OMOP_INPUT_DIRECTORY / CONCEPT.csv
# / CONCEPT_RELATIONSHIP.csv
#
#
# SynPuf data files
# BASE_SYNPUF_INPUT_DIRECTORY
# / DE1_0_2008_Beneficiary_Summary_File_Sample_<sample_number>.csv
# / DE1_0_2009_Beneficiary_Summary_File_Sample_<sample_number>.csv
# / DE1_0_2010_Beneficiary_Summary_File_Sample_<sample_number>.csv
# / DE1_0_2008_to_2010_Carrier_Claims_Sample_<sample_number>_A.csv
# / DE1_0_2008_to_2010_Carrier_Claims_Sample_<sample_number>_B.csv
# / DE1_0_2008_to_2010_Inpatient_Claims_Sample_<sample_number>_B.csv
# / DE1_0_2008_to_2010_Outpatient_Claims_Sample_<sample_number>_B.csv
# / DE1_0_2008_to_2010_Prescription_Drug_Events_Sample_<sample_number>_B.csv
#
# Output Produced:
# Last-used concept_IDs for CDM v5 tables
# BASE_OUTPUT_DIRECTORY / etl_synpuf_last_table_ids.txt
# / npi_provider_id.txt
# / provider_id_care_site.txt
# / location_dictionary.csv
#
# SynPuf Beneficiary Files with year prefix
# BASE_SYNPUF_INPUT_DIRECTORY
# / DE1_0_comb_Beneficiary_Summary_File_Sample_<sample_number>.csv
# / DE1_0_comb_Beneficiary_Summary_File_Sample_<sample_number>.csv.srt
# / DE1_0_2008_to_2010_Carrier_Claims_Sample_<sample_number>.csv.srt
# / DE1_0_2008_to_2010_Inpatient_Claims_Sample_<sample_number>.csv.srt
# / DE1_0_2008_to_2010_Outpatient_Claims_Sample_<sample_number>.csv.srt
# / DE1_0_2008_to_2010_Prescription_Drug_Events_Sample_<sample_number>.csv.srt
#
#
# OMOP CDM v5 Tables
# BASE_OUTPUT_DIRECTORY / care_site_<sample_number>.csv
# / condition_occurrence_<sample_number>.csv
# / death_<sample_number>.csv
# / device_cost_<sample_number>.csv
# / device_exposure_<sample_number>.csv
# / drug_cost_<sample_number>.csv
# / drug_exposure_<sample_number>.csv
# / location_<sample_number>.csv
# / measurement_occurrence_<sample_number>.csv
# / observation_<sample_number>.csv
# / observation_period_<sample_number>.csv
# / payer_plan_period_<sample_number>.csv
# / person_<sample_number>.csv
# / procedure_cost_<sample_number>.csv
# / procedure_occurrence_<sample_number>.csv
# / provider_<sample_number>.csv
# / specimen_<sample_number>.csv
# / visit_cost_<sample_number>.csv
# / visit_occurrence_<sample_number>.csv
#
#
# ** Various debug and log files
#
# ------------------------
# ------------------------
# 2015-02-05 C. Dougherty Created
#
# 2016-06-17 Christophe Lambert, Praveen Kumar, Amritansh -- University of New Mexico -- Major overhaul
# ------------------------
# Load environment-based configuration from the local .env file.
dotenv.load_dotenv(".env")
# -----------------------------------
# - Configuration
# -----------------------------------
# ---------------------------------
# Edit your .env file to change which directories to use in the ETL process.
# Note: os.environ[...] raises KeyError at import time if a variable is
# missing from the environment / .env file.
# Path to the directory where control files should be saved (input/output)
BASE_ETL_CONTROL_DIRECTORY = os.environ['BASE_ETL_CONTROL_DIRECTORY']
# Path to the directory containing the downloaded SynPUF files
BASE_SYNPUF_INPUT_DIRECTORY = os.environ['BASE_SYNPUF_INPUT_DIRECTORY']
# Path to the directory containing the OMOP Vocabulary v5 files (can be downloaded from http://www.ohdsi.org/web/athena/)
BASE_OMOP_INPUT_DIRECTORY = os.environ['BASE_OMOP_INPUT_DIRECTORY']
# Path to the directory where CDM-compatible CSV files should be saved
BASE_OUTPUT_DIRECTORY = os.environ['BASE_OUTPUT_DIRECTORY']
# SynPUF dir format. I've seen DE1_{0} and DE_{0} as different prefixes for the name of the directory containing a slice of SynPUF data
SYNPUF_DIR_FORMAT = os.environ['SYNPUF_DIR_FORMAT']
# Symbolic names for the CDM destination files a source code can route to
# (used as values of domain_destination_file_list below).
DESTINATION_FILE_DRUG = 'drug'
DESTINATION_FILE_CONDITION = 'condition'
DESTINATION_FILE_PROCEDURE = 'procedure'
DESTINATION_FILE_OBSERVATION = 'observation'
DESTINATION_FILE_MEASUREMENT = 'measurement'
DESTINATION_FILE_DEVICE = 'device'
DESTINATION_FILE_VISIT = 'visit'
class SourceCodeConcept(object):
    """Mapping from one source-vocabulary code to a target OMOP concept.

    Attributes:
        source_concept_code: raw code from the source vocabulary (e.g. ICD9,
            HCPCS, NDC), with any '.' already stripped by the caller.
        source_concept_id: OMOP concept id of the source code itself.
        target_concept_id: OMOP concept id the source maps to ("0" when the
            caller found no valid mapping).
        destination_file: which CDM output file records for this code go to
            (one of the DESTINATION_FILE_* values).
    """
    def __init__(self, source_concept_code, source_concept_id, target_concept_id, destination_file):
        self.source_concept_code = source_concept_code
        self.source_concept_id = source_concept_id
        self.target_concept_id = target_concept_id
        self.destination_file = destination_file

    def __repr__(self):
        # Debugging aid only; nothing in the ETL relies on this format.
        return 'SourceCodeConcept({0!r}, {1!r}, {2!r}, {3!r})'.format(
            self.source_concept_code, self.source_concept_id,
            self.target_concept_id, self.destination_file)
# -----------------------------------
# Globals
# -----------------------------------
file_control = None  # set elsewhere before use; presumably a FileControl instance -- confirm against caller
table_ids = None  # presumably a Table_ID_Values instance holding last-used ids -- confirm against caller
source_code_concept_dict = {} # stores source and target concept ids + destination file
concept_relationship_dict = {} # stores the source concept id and its mapped target concept id
person_location_dict = {} # stores location_id for a given state + county
current_stats_filename = ''  # path of the log file appended to by log_stats(); must be set before logging
#This was used to detect death via ICD9 codes, but since death information is
#listed in the beneficiary file, we will not use. Plus this isn't even a complete list
#icd9_codes_death = ['761.6', '798', '798.0', '798.1', '798.2','798.9', '799.9', 'E913.0','E913.1','E913.2','E913.3','E913.8','E913.9', 'E978']
provider_id_care_site_id = {} # stores care site id for a provider_num(institution)
visit_id_list = set() # stores unique visit ids written to visit occurrence file
visit_occurrence_ids = OrderedDict() # stores visit ids generated by determine_visits function
npi_provider_id = {} # stores provider id for an npi
#-------------------------------------------------------------------------------
# SSA codes for Puerto Rico('40') and Virgin Islands ('48') have not been added
# to the following dictionary. SSA code '54' is for others where others=
# PUERTO RICO, VIRGIN ISLANDS, AFRICA, ASIA OR CALIFORNIA; INSTITUTIONAL PROVIDER
# OF SERVICES (IPS) ONLY, CANADA & ISLANDS, CENTRAL AMERICA AND WEST INDIES,
# EUROPE, MEXICO, OCEANIA, PHILIPPINES, SOUTH AMERICA, U.S. POSSESSIONS, AMERICAN
# SAMOA, GUAM, SAIPAN OR NORTHERN MARIANAS, TEXAS; INSTITUTIONAL PROVIDER OF SERVICES
# (IPS) ONLY, NORTHERN MARIANAS, GUAM, UNKNOWN.
#-------------------------------------------------------------------------------
# SSA state code -> two-letter state abbreviation; '54' is the catch-all
# "other" bucket and is kept verbatim (see the note above for the codes
# that are deliberately absent).
SSA_state_codes = {
    '01':'AL',
    '02':'AK',
    '03':'AZ',
    '04':'AR',
    '05':'CA',
    '06':'CO',
    '07':'CT',
    '08':'DE',
    '09':'DC',
    '10':'FL',
    '11':'GA',
    '12':'HI',
    '13':'ID',
    '14':'IL',
    '15':'IN',
    '16':'IA',
    '17':'KS',
    '18':'KY',
    '19':'LA',
    '20':'ME',
    '21':'MD',
    '22':'MA',
    '23':'MI',
    '24':'MN',
    '25':'MS',
    '26':'MO',
    '27':'MT',
    '28':'NE',
    '29':'NV',
    '30':'NH',
    '31':'NJ',
    '32':'NM',
    '33':'NY',
    '34':'NC',
    '35':'ND',
    '36':'OH',
    '37':'OK',
    '38':'OR',
    '39':'PA',
    '41':'RI',
    '42':'SC',
    '43':'SD',
    '44':'TN',
    '45':'TX',
    '46':'UT',
    '47':'VT',
    '49':'VA',
    '50':'WA',
    '51':'WV',
    '52':'WI',
    '53':'WY',
    '54':'54'}
# Maps an OMOP domain_id (as found in CONCEPT.csv) to the CDM output file
# that records for concepts of that domain are routed to by build_maps().
domain_destination_file_list = {
    'Condition' : DESTINATION_FILE_CONDITION,
    'Condition/Meas' : DESTINATION_FILE_MEASUREMENT,
    'Condition/Obs' : DESTINATION_FILE_OBSERVATION,
    'Condition/Procedure' : DESTINATION_FILE_PROCEDURE,
    'Device' : DESTINATION_FILE_DEVICE,
    'Device/Obs' : DESTINATION_FILE_OBSERVATION,
    'Device/Procedure' : DESTINATION_FILE_PROCEDURE,
    'Drug' : DESTINATION_FILE_DRUG,
    'Measurement' : DESTINATION_FILE_MEASUREMENT,
    'Meas/Procedure' : DESTINATION_FILE_PROCEDURE,
    'Obs/Procedure' : DESTINATION_FILE_PROCEDURE,
    'Observation' : DESTINATION_FILE_OBSERVATION,
    'Procedure' : DESTINATION_FILE_PROCEDURE,
    'Visit' : DESTINATION_FILE_VISIT,
    'Place of Service' : DESTINATION_FILE_VISIT,
    'Meas Value' : DESTINATION_FILE_MEASUREMENT
    }
# -----------------------------------
# get timestamp
# -----------------------------------
def get_timestamp():
    """Return the current local time formatted for log lines."""
    fmt = "%Y-%m-%d %H:%M:%S"
    return strftime(fmt)
# -----------------------------------
# TODO: use standard python logger...
# -----------------------------------
def log_stats(msg):
    """Print *msg* and append it, timestamped, to the current stats file.

    Relies on the module-level ``current_stats_filename`` having been set
    beforehand; opening '' (the initial value) raises an IOError.
    """
    # print() with a single argument behaves identically under Python 2
    # and 3 (the original `print msg` statement was Python-2-only).
    print(msg)
    global current_stats_filename
    with open(current_stats_filename, 'a') as fout:
        fout.write('[{0}]{1}\n'.format(get_timestamp(), msg))
# -----------------------------------
# format date in YYYYMMDD
# -----------------------------------
def get_date_YYYY_MM_DD(date_YYYYMMDD):
    """Reformat a 'YYYYMMDD' string as 'YYYY-MM-DD'; '' passes through."""
    if not date_YYYYMMDD:
        return ''
    year, month, day = date_YYYYMMDD[0:4], date_YYYYMMDD[4:6], date_YYYYMMDD[6:8]
    return '-'.join((year, month, day))
# -----------------------------------------------------------------------------------------------------
# Each provider_num (institution) has a unique care_site_id. It is generated by the following code by
# adding 1 to previous care_site_id.
# -------------------------------------------------------------------------------------------------------
def get_CareSite(provider_num):
    """Return the care_site_id for *provider_num*, allocating a fresh id on first sight."""
    global table_ids
    entry = provider_id_care_site_id.get(provider_num)
    if entry is None:
        # New institution: hand out the next id and remember it.
        entry = [table_ids.last_care_site_id, 0]
        provider_id_care_site_id[provider_num] = entry
        table_ids.last_care_site_id += 1
    return entry[0]
# -------------------------------------------------------------------------
# A unique provider_id for each npi is generated by adding 1 to the previous provider_id
# --------------------------------------------------------------------------
def get_Provider(npi):
    """Return the provider_id for *npi*, allocating a fresh id on first sight."""
    global table_ids
    entry = npi_provider_id.get(npi)
    if entry is None:
        # New NPI: hand out the next id and remember it.
        entry = [table_ids.last_provider_id, 0]
        npi_provider_id[npi] = entry
        table_ids.last_provider_id += 1
    return entry[0]
# --------------------------------------------------------------------------------------------------
# A unique location id for each unique combination of state+county is generated by adding 1 to
# the previous location id
# ------------------------------------------------------------------------------------------------
def get_location_id(state_county):
    """Return the location_id for a 'state-county' key, allocating a fresh id on first sight."""
    global table_ids
    entry = person_location_dict.get(state_county)
    if entry is None:
        # New state/county combination: hand out the next id and remember it.
        entry = [table_ids.last_location_id, 0]
        person_location_dict[state_county] = entry
        table_ids.last_location_id += 1
    return entry[0]
# -----------------------------------
# This function produces dictionaries that give mappings between SynPUF codes and OMOP concept_ids
# -----------------------------------
def build_maps():
log_stats('-'*80)
log_stats('build_maps starting...')
#--------------------------------------------------------------------------------------
# load existing person_location_dict. v5
# It populates the dictionary with the existing data so that the subsequent run of this
# program doesn't generate the duplicate location_id.
#--------------------------------------------------------------------------------------
recs_in = 0
global table_ids
global person_location_dict
location_dict_file = os.path.join(BASE_ETL_CONTROL_DIRECTORY,"location_dictionary.txt")
if os.path.exists(location_dict_file):
log_stats('reading existing location_dict_file ->' + location_dict_file)
with open(location_dict_file,'r') as fin:
for rec in fin:
recs_in += 1
flds = (rec[:-1]).split('\t')
if len(flds) == 2:
state_county = flds[0]
location_id = flds[1]
location_id = location_id.lstrip('[').rstrip(']').split(',') #convert string to list as the file data is string
location_id = [int(location_id[0]), int(location_id[1])] # convert the data in the list to integer
person_location_dict[state_county] = location_id
log_stats('done, recs_in={0}, len person_location_dict={1}'.format(recs_in, len(person_location_dict)))
else:
log_stats('No existing location_dict_file found (looked for ->' + location_dict_file + ')')
#----------------
# load existing provider_id_care_site_id.
# It populates the dictionary with the existing data so that the subsequent run of this
# program doesn't generate the duplicate care_site_id.
#----------------
recs_in = 0
global table_ids
global provider_id_care_site_id
provider_id_care_site_file = os.path.join(BASE_ETL_CONTROL_DIRECTORY,'provider_id_care_site.txt')
if os.path.exists(provider_id_care_site_file):
log_stats('reading existing provider_id_care_site_file ->' + provider_id_care_site_file)
with open(provider_id_care_site_file,'r') as fin:
for rec in fin:
recs_in += 1
flds = (rec[:-1]).split('\t')
if len(flds) == 2:
provider_num = flds[0]
care_site_id = flds[1]
care_site_id = care_site_id.lstrip('[').rstrip(']').split(',') #convert string to list as the file data is string
care_site_id = [int(care_site_id[0]), int(care_site_id[1])] # convert the data in the list to integer
provider_id_care_site_id[provider_num] = care_site_id
log_stats('done, recs_in={0}, len provider_id_care_site_id={1}'.format(recs_in, len(provider_id_care_site_id)))
else:
log_stats('No existing provider_id_care_site_file found (looked for ->' + provider_id_care_site_file + ')')
#----------------
# load existing npi_provider_id
# It populates the dictionary with the existing data so that the subsequent run of this
# program doesn't generate the duplicate provider_id.
#----------------
recs_in = 0
global npi_provider_id
npi_provider_id_file = os.path.join(BASE_ETL_CONTROL_DIRECTORY,'npi_provider_id.txt')
if os.path.exists(npi_provider_id_file):
log_stats('reading existing npi_provider_id_file ->' + npi_provider_id_file)
with open(npi_provider_id_file,'r') as fin:
for rec in fin:
recs_in += 1
flds = (rec[:-1]).split('\t')
if len(flds) == 2:
npi = flds[0]
provider_id = flds[1]
provider_id = provider_id.lstrip('[').rstrip(']').split(',') #convert string to list as the file data is string
provider_id = [int(provider_id[0]), int(provider_id[1])] # convert the data in the list to integer
npi_provider_id[npi] = provider_id
log_stats('done, recs_in={0}, len npi_provider_id={1}'.format(recs_in, len(npi_provider_id_file)))
else:
log_stats('No existing npi_provider_id_file found (looked for ->' + npi_provider_id_file + ')')
#----------------
# Load the OMOP v5 Concept file to build the source code to conceptID xref.
# NOTE: This version of the flat file had embedded newlines. This code handles merging the split
# records. This may not be needed when the final OMOP v5 Concept file is produced.
#----------------
omop_concept_relationship_debug_file = os.path.join(BASE_OUTPUT_DIRECTORY,'concept_relationship_debug_log.txt')
omop_concept_relationship_file = os.path.join(BASE_OMOP_INPUT_DIRECTORY,'CONCEPT_RELATIONSHIP.csv')
omop_concept_debug_file = os.path.join(BASE_OUTPUT_DIRECTORY,'concept_debug_log.txt')
omop_concept_file = os.path.join(BASE_OMOP_INPUT_DIRECTORY,'CONCEPT.csv')
recs_in = 0
recs_skipped = 0
log_stats('Reading omop_concept_relationship_file -> ' + omop_concept_relationship_file)
log_stats('Writing to log file -> ' + omop_concept_relationship_debug_file)
with open(omop_concept_relationship_file,'r') as fin, \
open(omop_concept_relationship_debug_file, 'w') as fout_log:
fin.readline() #skip header
for rec in fin:
recs_in += 1
if recs_in % 100000 == 0: print 'omop concept relationship recs=',recs_in
flds = (rec[:-1]).split('\t')
if len(flds) == OMOP_CONCEPT_RELATIONSHIP_RECORD.fieldCount:
concept_id1 = flds[OMOP_CONCEPT_RELATIONSHIP_RECORD.CONCEPT_ID_1]
concept_id2 = flds[OMOP_CONCEPT_RELATIONSHIP_RECORD.CONCEPT_ID_2]
relationship_id = flds[OMOP_CONCEPT_RELATIONSHIP_RECORD.RELATIONSHIP_ID]
invalid_reason = flds[OMOP_CONCEPT_RELATIONSHIP_RECORD.INVALID_REASON]
if concept_id1 != '' and concept_id2 != '' and relationship_id == "Maps to" and invalid_reason == '':
if concept_relationship_dict.has_key(concept_id1): # one concept id might have several mapping, so values are stored as list
concept_relationship_dict[concept_id1].append(concept_id2)
else:
concept_relationship_dict[concept_id1] = [concept_id2]
else:
recs_skipped = recs_skipped + 1
log_stats('Done, omop concept recs_in = ' + str(recs_in))
log_stats('recs_skipped = ' + str(recs_skipped))
log_stats('len source_code_concept_dict = ' + str(len(source_code_concept_dict)))
recs_in = 0
recs_skipped = 0
merged_recs=0
recs_checked=0
#TODO: there is an overlap of 41 2-character codes that are the same between CPT4 and HCPCS,
#but map to different OMOP concepts. Need to determine which should prevail. Whichever prevails should call one of the next 2 code blocks first.
log_stats('Reading omop_concept_file -> ' + omop_concept_file)
log_stats('Writing to log file -> ' + omop_concept_debug_file)
#First pass to obtain domain ids of concepts
domain_dict = {}
with open(omop_concept_file,'r') as fin:
fin.readline()
for rec in fin:
flds = (rec[:-1]).split('\t')
if len(flds) == OMOP_CONCEPT_RECORD.fieldCount:
concept_id = flds[OMOP_CONCEPT_RECORD.CONCEPT_ID]
domain_id = flds[OMOP_CONCEPT_RECORD.DOMAIN_ID]
domain_dict[concept_id] = domain_id
print "loaded domain dict with this many records: ", len(domain_dict)
with open(omop_concept_file,'r') as fin, \
open(omop_concept_debug_file, 'w') as fout_log:
# open(omop_concept_file_mini, 'w') as fout_mini:
fin.readline() #skip header
for rec in fin:
recs_in += 1
if recs_in % 100000 == 0: print 'omop concept recs=',recs_in
flds = (rec[:-1]).split('\t')
if len(flds) == OMOP_CONCEPT_RECORD.fieldCount:
concept_id = flds[OMOP_CONCEPT_RECORD.CONCEPT_ID]
concept_code = original_concept_code = flds[OMOP_CONCEPT_RECORD.CONCEPT_CODE].replace(".","")
vocabulary_id = flds[OMOP_CONCEPT_RECORD.VOCABULARY_ID]
if vocabulary_id == OMOP_CONSTANTS.CPT4_VOCABULARY_ID:
vocabulary_id = OMOP_CONSTANTS.HCPCS_VOCABULARY_ID
if(vocabulary_id in [OMOP_CONSTANTS.ICD_9_DIAGNOSIS_VOCAB_ID,OMOP_CONSTANTS.ICD_9_PROCEDURES_VOCAB_ID]):
vocabulary_id = OMOP_CONSTANTS.ICD_9_VOCAB_ID
domain_id = flds[OMOP_CONCEPT_RECORD.DOMAIN_ID]
invalid_reason = flds[OMOP_CONCEPT_RECORD.INVALID_REASON]
status = ''
if concept_id != '':
if vocabulary_id in [OMOP_CONSTANTS.ICD_9_VOCAB_ID,
OMOP_CONSTANTS.HCPCS_VOCABULARY_ID,
OMOP_CONSTANTS.NDC_VOCABULARY_ID]:
recs_checked += 1
if not concept_relationship_dict.has_key(concept_id):
if not domain_destination_file_list.has_key(domain_id):
status = "No destination defined for domain " + domain_id + " of concept " + concept_id
else:
destination_file = domain_destination_file_list[domain_id]
if( vocabulary_id == OMOP_CONSTANTS.ICD_9_VOCAB_ID):
status = "No map from ICD9 code, or code invalid for " + concept_id
recs_skipped += 1
if( vocabulary_id == OMOP_CONSTANTS.HCPCS_VOCABULARY_ID):
status = "No self map from OMOP (HCPCS/CPT4) to OMOP (HCPCS/CPT4) or code invalid for " + concept_id
recs_skipped += 1
if( vocabulary_id == OMOP_CONSTANTS.NDC_VOCABULARY_ID):
status = "No map from OMOP (NDC) to OMOP (RxNorm) or code invalid for " + concept_id
recs_skipped += 1
source_code_concept_dict[vocabulary_id,concept_code] = [SourceCodeConcept(concept_code, concept_id, "0", destination_file)]
else:
source_code_concept_dict[vocabulary_id,concept_code] = []
for concept in concept_relationship_dict[concept_id]:
if not domain_destination_file_list.has_key(domain_dict[concept]):
status = "No destination defined for domain " + domain_dict[concept] + " of concept " + concept_id
else:
destination_file = domain_destination_file_list[domain_dict[concept]]
source_code_concept_dict[vocabulary_id,concept_code].append(SourceCodeConcept(concept_code, concept_id, concept, destination_file))
if status != '':
fout_log.write(status + ': \t')
# for fld in line: fout_log.write(fld + '\t')
fout_log.write(rec + '\n')
log_stats('Done, omop concept recs_in = ' + str(recs_in))
log_stats('recs_checked = ' + str(recs_checked))
log_stats('recs_skipped = ' + str(recs_skipped))
log_stats('merged_recs = ' + str(merged_recs))
log_stats('len source_code_concept_dict = ' + str(len(source_code_concept_dict)))
#---------------------------
# -----------------------------------
# write the provider_num(institution) + care_site_id to provider_id_care_site.txt file.
# write the npi + provider_id to npi_provider_id.txt file.
# the data from these two files are loaded to dictionaries before processing the input
# records to make sure that the duplicate records are not written to care_site and provider files.
# -----------------------------------
def persist_lookup_tables():
    """Persist the id-allocation dictionaries so re-runs reuse existing ids.

    Writes three tab-separated files under BASE_ETL_CONTROL_DIRECTORY:
    state+county -> location_id, provider_num -> care_site_id, and
    npi -> provider_id. build_maps() reloads these on the next run so that
    duplicate ids are never issued.
    """
    # (file name, label used in log lines, dict label, dict) per table.
    tables = [
        ('location_dictionary.txt', 'location_dict_file',
         'person_location_dict', person_location_dict),
        ('provider_id_care_site.txt', 'provider_id_care_site_file',
         'provider_id_care_site_id', provider_id_care_site_id),
        ('npi_provider_id.txt', 'npi_provider_id_file',
         'npi_provider_id', npi_provider_id),
    ]
    for file_name, file_label, dict_label, table in tables:
        out_path = os.path.join(BASE_ETL_CONTROL_DIRECTORY, file_name)
        log_stats('writing {0} ->{1}'.format(file_label, out_path))
        recs_out = 0
        with open(out_path, 'w') as fout:
            for key, value in table.items():
                fout.write('{0}\t{1}\n'.format(key, value))
                recs_out += 1
        log_stats('done, recs_out={0}, len {1}={2}'.format(recs_out, dict_label, len(table)))
# ------------------------------------------------------------------------------------------------------------------------
# Logic to determine visits. visit_dates is used to determine the start and end date of observation period for a beneficiary.
# visit_occurrence_ids keeps track of unique visits.
# -------------------------------------------------------------------------------------------------------------------------
def determine_visits(bene):
    """Assign visit ids for one beneficiary's claims and record visit dates.

    Side effects:
      * ``bene.visit_dates`` maps each relevant service/claim date to a
        visit id (comments below note these feed the observation period).
      * module-level ``visit_occurrence_ids`` gains one id per unique
        (person, from-date, thru-date, institution) key, so identical
        claims share a visit.
      * ``table_ids.last_visit_occurrence_id`` is advanced past the ids
        consumed here.
    """
    # each unique date gets a visit id
    visit_id = table_ids.last_visit_occurrence_id
    #For death records just track dates for purpose of observation_period
    yd = bene.LatestYearData()
    if yd is not None and yd.BENE_DEATH_DT != '':
        bene.visit_dates[yd.BENE_DEATH_DT] = visit_id
    #For prescription records just track dates for purpose of observation_period
    for raw_rec in bene.prescription_records:
        rec = PrescriptionDrug(raw_rec)
        if rec.SRVC_DT == '':
            continue
        bene.visit_dates[rec.SRVC_DT] = visit_id
    #For inpatient records, if same patient, same date range, and same provider institution number, is same visit
    for raw_rec in bene.inpatient_records:
        rec = InpatientClaim(raw_rec)
        if rec.CLM_FROM_DT == '':
            continue
        if not visit_occurrence_ids.has_key((rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.PRVDR_NUM)):
            bene.visit_dates[rec.CLM_FROM_DT] = visit_id
            bene.visit_dates[rec.CLM_THRU_DT] = visit_id
            visit_occurrence_ids[rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.PRVDR_NUM] = visit_id
            visit_id+=1
    #For outpatient records, if same patient, same date range, and same provider institution number, is same visit
    for raw_rec in bene.outpatient_records:
        rec = OutpatientClaim(raw_rec)
        if rec.CLM_FROM_DT == '':
            continue
        if not visit_occurrence_ids.has_key((rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.PRVDR_NUM)):
            bene.visit_dates[rec.CLM_FROM_DT] = visit_id
            bene.visit_dates[rec.CLM_THRU_DT] = visit_id
            visit_occurrence_ids[rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.PRVDR_NUM] = visit_id
            visit_id+=1
    #For carrier claims, if same patient, same date range, and same institution tax number, is same visit
    for raw_rec in bene.carrier_records:
        rec = CarrierClaim(raw_rec)
        if rec.CLM_FROM_DT == '':
            continue
        if not visit_occurrence_ids.has_key((rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.TAX_NUM)):
            bene.visit_dates[rec.CLM_FROM_DT] = visit_id
            bene.visit_dates[rec.CLM_THRU_DT] = visit_id
            visit_occurrence_ids[rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.TAX_NUM] = visit_id
            visit_id+=1
    # NOTE(review): death/prescription dates all reuse the *initial* visit_id
    # even after claim processing increments it -- presumably intentional,
    # since those entries are only used for date tracking; confirm.
    table_ids.last_visit_occurrence_id = visit_id #store the last_visit_occurrence_id
# -----------------------------------
# CDM v5 Person - Write person records
# -----------------------------------
def write_person_record(beneficiary):
    """Write one CDM v5 person row for *beneficiary*.

    Uses the most recent beneficiary-summary year's data; writes nothing
    when no yearly data exists. Columns are emitted positionally,
    comma-separated; blank fields become empty columns.
    """
    person_fd = file_control.get_Descriptor('person')
    yd = beneficiary.LatestYearData()
    if yd is None: return
    person_fd.write('{0},'.format(beneficiary.person_id)) # person_id
    if int(yd.BENE_SEX_IDENT_CD) == 1: # gender_concept_id
        person_fd.write('{0},'.format(OMOP_CONSTANTS.GENDER_MALE))
    elif int(yd.BENE_SEX_IDENT_CD) == 2:
        person_fd.write('{0},'.format(OMOP_CONSTANTS.GENDER_FEMALE))
    else:
        # Unrecognised sex code -> concept id 0.
        person_fd.write('0,')
    person_fd.write('{0},'.format(yd.BENE_BIRTH_DT[0:4])) # year_of_birth
    person_fd.write('{0},'.format(yd.BENE_BIRTH_DT[4:6])) # month_of_birth
    person_fd.write('{0},'.format(yd.BENE_BIRTH_DT[6:8])) # day_of_birth
    person_fd.write(',') # time_of_birth
    #print ("yd.BENE_RACE_CD: " + str(yd.BENE_RACE_CD))
    if int(yd.BENE_RACE_CD) == 1: #White # race_concept_id and ethnicity_concept_id
        person_fd.write('{0},'.format(OMOP_CONSTANTS.RACE_WHITE))
        person_fd.write('{0},'.format(OMOP_CONSTANTS.ETHNICITY_NON_HISPANIC))
    elif int(yd.BENE_RACE_CD) == 2: #Black
        person_fd.write('{0},'.format(OMOP_CONSTANTS.RACE_BLACK))
        person_fd.write('{0},'.format(OMOP_CONSTANTS.ETHNICITY_NON_HISPANIC))
    elif int(yd.BENE_RACE_CD) == 3: #Others
        person_fd.write('{0},'.format(OMOP_CONSTANTS.RACE_OTHER))
        person_fd.write('{0},'.format(OMOP_CONSTANTS.ETHNICITY_NON_HISPANIC))
    elif int(yd.BENE_RACE_CD) == 5: #Hispanic
        person_fd.write('{0},'.format(OMOP_CONSTANTS.RACE_NON_WHITE))
        person_fd.write('{0},'.format(OMOP_CONSTANTS.ETHNICITY_HISPANIC))
    else:
        # NOTE(review): any race code not handled above (e.g. '4') falls
        # through to 0/0 -- confirm this is intended.
        person_fd.write('0,')
        person_fd.write('0,')
    #write person records to the person file
    state_county = str(beneficiary.SP_STATE_CODE) + '-' + str(beneficiary.BENE_COUNTY_CD)
    current_location_id = get_location_id(state_county) # get the location id for the given pair of state & county
    person_fd.write('{0},'.format(current_location_id)) # location_id
    person_fd.write(',') # provider_id
    person_fd.write(',') # care_site_id
    person_fd.write('{0},'.format(beneficiary.DESYNPUF_ID)) # person_source_value
    person_fd.write('{0},'.format(yd.BENE_SEX_IDENT_CD)) # gender_source_value
    person_fd.write(',') # gender_source_concept_id
    person_fd.write('{0},'.format(yd.BENE_RACE_CD)) # race_source_value
    person_fd.write(',') # race_source_concept_id
    person_fd.write('{0},'.format(yd.BENE_RACE_CD)) # ethnicity_source_value
    #person_fd.write('') # ethnicity_source_concept_id
    person_fd.write('\n')
    person_fd.increment_recs_written(1)
# ----------------------------------------------------
# Write payer plan period records for each beneficiary
# ----------------------------------------------------
def write_payer_plan_period_record(beneficiary):
    """Write payer_plan_period rows for every plan covering *beneficiary*.

    For each of the four SynPUF coverage counters (Part A, Part B, HMO,
    Part D) the per-year months-of-coverage counts are converted into one
    or more (start_date, end_date) windows, each written as one row.
    Writes nothing when the beneficiary has no coverage data at all.
    """
    payer_plan_period_fd = file_control.get_Descriptor('payer_plan_period')
    # Plan name -> SynPUF column holding its months-of-coverage count.
    # Order matters: rows are emitted in this sequence, matching the
    # original per-branch implementation (A, B, HMO, D).
    plan_coverage_fields = [
        ("Medicare Part A", 'BENE_HI_CVRAGE_TOT_MONS'),
        ("Medicare Part B", 'BENE_SMI_CVRAGE_TOT_MONS'),
        ("HMO", 'BENE_HMO_CVRAGE_TOT_MONS'),
        ("Medicare Part D", 'PLAN_CVRG_MOS_NUM'),
    ]
    ppyd = beneficiary.PayerPlanPerioYearDict() # (year, field) -> months covered, for all 3 years
    if not bool(ppyd):
        return # dictionary is empty
    for plan_source_value, field_name in plan_coverage_fields:
        # Reduce to {year: months} for this plan only.
        nd = {k[0]: v for k, v in ppyd.items() if k[1] == field_name}
        for payer_plan_period_start_date, payer_plan_period_end_date in get_payer_plan_period_date_list(nd):
            write_to_payer_plan_period_file(payer_plan_period_fd,
                                            beneficiary.person_id,
                                            payer_plan_period_start_date,
                                            payer_plan_period_end_date,
                                            plan_source_value)
#------------------------------------------------------
# write payer plan period data to the file
#--------------------------------------------------------
def write_to_payer_plan_period_file(payer_plan_period_fd, person_id, payer_plan_period_start_date, payer_plan_period_end_date, plan_source_value):
    """Append one PAYER_PLAN_PERIOD row and advance the shared id counter.

    Columns with no source data (payer_source_value, family_source_value)
    are emitted as empty fields.
    """
    row = [
        '{0}'.format(table_ids.last_payer_plan_period_id),   # payer_plan_period_id
        '{0}'.format(person_id),                             # person_id
        '{0}'.format(payer_plan_period_start_date),          # payer_plan_period_start_date
        '{0}'.format(payer_plan_period_end_date),            # payer_plan_period_end_date
        '',                                                  # payer_source_value
        '{0}'.format(plan_source_value),                     # plan_source_value
        '',                                                  # family_source_value
    ]
    payer_plan_period_fd.write(','.join(row) + '\n')
    payer_plan_period_fd.increment_recs_written(1)
    table_ids.last_payer_plan_period_id += 1
#----------------------------------------------------------------
# generate the list of payer_plan_period start date and end date.
# date_list will be in this format date_list = [(d1,d2),(d1,d2)]
#-----------------------------------------------------------------
def get_payer_plan_period_date_list(plan):
    """Convert a {year: months-of-coverage} mapping into coverage periods.

    `plan` maps the year strings '2008'/'2009'/'2010' to the number of
    covered months in that year (0-12).  Missing years are filled in with 0
    (note: this mutates the caller's dict).  Returns a list of
    (start_date, end_date) pairs; fully covered consecutive years are merged
    into a single period, partially covered years get their own period.

    NOTE(review): for a partially covered year the end date comes from
    get_payer_plan_period_date(date(Y,01,01), months), which yields the
    first day of the month *after* the covered span rather than the last
    covered day -- confirm this is the intended convention.
    """
    date_list = []
    # check if any year is missing. If yes, add that year. This will prevent dictionary keyError at runtime.
    for year in ['2008','2009','2010']:
        if year not in plan:
            plan[year] = 0
    # determine the start and end date for payer plan period
    # case: all three years fully covered -> one merged 3-year period
    if plan['2008'] == 12 and plan['2009'] == 12 and plan['2010'] == 12:
        payer_plan_period_start_date = '2008-01-01'
        payer_plan_period_end_date = '2010-12-31'
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: 2008+2009 full, 2010 partial -> single period ending inside 2010
    elif plan['2008'] == 12 and plan['2009'] == 12 and plan['2010'] < 12:
        payer_plan_period_start_date = '2008-01-01'
        payer_plan_period_end_date = get_payer_plan_period_date(date(2009,12,31), plan['2010'])
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: coverage gap in 2009 splits the span into up to three periods
    elif plan['2008'] == 12 and plan['2009'] < 12 and plan['2010'] == 12:
        payer_plan_period_start_date = '2008-01-01'
        payer_plan_period_end_date = '2008-12-31'
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        if plan['2009'] > 0:
            payer_plan_period_start_date = '2009-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2009,01,01), plan['2009'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        payer_plan_period_start_date = '2010-01-01'
        payer_plan_period_end_date = '2010-12-31'
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: 2008 full, 2009 and 2010 partial
    elif plan['2008'] == 12 and plan['2009'] < 12 and plan['2010'] < 12:
        payer_plan_period_start_date = '2008-01-01'
        payer_plan_period_end_date = '2008-12-31'
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        if plan['2009'] > 0:
            payer_plan_period_start_date = '2009-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2009,01,01), plan['2009'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        if plan['2010'] > 0:
            payer_plan_period_start_date = '2010-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2010,01,01), plan['2010'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: 2008 partial (counted backwards from year end), 2009+2010 full
    elif plan['2008'] < 12 and plan['2009'] == 12 and plan['2010'] == 12:
        if plan['2008'] == 0:
            payer_plan_period_start_date = '2009-01-01'
        else:
            # negative delta walks backwards from 2008-12-31 by the covered months
            payer_plan_period_start_date = get_payer_plan_period_date(date(2008,12,31), -1*plan['2008'])
        payer_plan_period_end_date = '2010-12-31'
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: 2008 partial, 2009 full, 2010 partial -> single contiguous period
    elif plan['2008'] < 12 and plan['2009'] == 12 and plan['2010'] < 12:
        if plan['2008'] == 0:
            payer_plan_period_start_date = '2009-01-01'
        else:
            payer_plan_period_start_date = get_payer_plan_period_date(date(2008,12,31), -1*plan['2008'])
        payer_plan_period_end_date = get_payer_plan_period_date(date(2009,12,31), plan['2010'])
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: 2008 and 2009 partial, 2010 full -> one period per covered year
    elif plan['2008'] < 12 and plan['2009'] < 12 and plan['2010'] == 12:
        if plan['2008'] > 0:
            payer_plan_period_start_date = '2008-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2008,01,01), plan['2008'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        if plan['2009'] > 0:
            payer_plan_period_start_date = '2009-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2009,01,01), plan['2009'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        payer_plan_period_start_date = '2010-01-01'
        payer_plan_period_end_date = '2010-12-31'
        date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    # case: all three years partial -> one period per covered year
    elif plan['2008'] < 12 and plan['2009'] < 12 and plan['2010'] < 12:
        if plan['2008'] > 0:
            payer_plan_period_start_date = '2008-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2008,01,01), plan['2008'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        if plan['2009'] > 0:
            payer_plan_period_start_date = '2009-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2009,01,01), plan['2009'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
        if plan['2010'] > 0:
            payer_plan_period_start_date = '2010-01-01'
            payer_plan_period_end_date = get_payer_plan_period_date(date(2010,01,01), plan['2010'])
            date_list.append((payer_plan_period_start_date, payer_plan_period_end_date))
    return date_list
#---------------------------------------------------------------------
# use the start/end date and number of months(delta) to calculate the
# end/start date
#--------------------------------------------------------------------
def get_payer_plan_period_date(date, delta):
    """Return *date* shifted by *delta* months (negative shifts backwards).

    The day-of-month is clamped to the last valid day of the target month
    (e.g. Jan 31 + 1 month -> Feb 28/29).  The parameter name shadows
    datetime.date; it is kept unchanged for call compatibility.
    """
    months_from_zero = date.month + delta              # 1-based month count; may be <= 0
    year_shift, month_index = divmod(months_from_zero - 1, 12)
    new_year = date.year + year_shift
    new_month = month_index + 1                        # back to 1..12
    # clamp the day so e.g. the 31st maps into shorter months
    new_day = min(date.day, calendar.monthrange(new_year, new_month)[1])
    return date.replace(day=new_day, month=new_month, year=new_year)
# -----------------------------------
# Write Location records
# -----------------------------------
def write_location_record(beneficiary):
    """Write one LOCATION row for the beneficiary's state/county pair.

    A row is written only the first time a given state-county combination is
    seen (status tracked in person_location_dict); afterwards the pair is
    flagged as written so duplicates are suppressed.

    Side effect: zero-pads beneficiary.SP_STATE_CODE (2 chars) and
    beneficiary.BENE_COUNTY_CD (3 chars) in place, as the original did.
    """
    state_county = str(beneficiary.SP_STATE_CODE) + '-' + str(beneficiary.BENE_COUNTY_CD)
    current_location_id = get_location_id(state_county) # get the location id for the given pair of state & county
    idx = person_location_dict[state_county][1]
    if idx == 0:
        location_fd = file_control.get_Descriptor('location')
        location_fd.write('{0},'.format(current_location_id)) # location_id
        location_fd.write(',')
        location_fd.write(',')
        location_fd.write(',')
        # Prefer the readable state abbreviation when the SSA code is known,
        # otherwise fall back to the raw SSA code.  Only the dict lookup can
        # fail here, so catch KeyError specifically instead of the original
        # bare except (which silently hid any unrelated error), and keep the
        # try body down to the single statement that can raise.
        try:
            state_value = SSA_state_codes[beneficiary.SP_STATE_CODE]
        except KeyError:
            state_value = beneficiary.SP_STATE_CODE
        location_fd.write('{0},'.format(state_value)) # state_code
        location_fd.write(',')
        if len(beneficiary.SP_STATE_CODE) == 1: # convert to 2 bytes
            beneficiary.SP_STATE_CODE = '0' + str(beneficiary.SP_STATE_CODE)
        if len(beneficiary.BENE_COUNTY_CD) == 1: # convert to 3 bytes
            beneficiary.BENE_COUNTY_CD = '00' + str(beneficiary.BENE_COUNTY_CD)
        elif len(beneficiary.BENE_COUNTY_CD) == 2: # convert to 3 bytes
            beneficiary.BENE_COUNTY_CD = '0' + str(beneficiary.BENE_COUNTY_CD)
        local_county_code = str(beneficiary.SP_STATE_CODE) + str(beneficiary.BENE_COUNTY_CD)
        location_fd.write('{0},'.format(local_county_code)) # county_code
        location_fd.write('{0}'.format(beneficiary.LOCATION_ID)) # location_source_value
        location_fd.write('\n')
        location_fd.increment_recs_written(1)
        person_location_dict[state_county] = [person_location_dict[state_county][0],1] # change the status to written
# -----------------------------------
# Observation Period
# -----------------------------------
def write_observation_period_records(beneficiary):
    """Emit one OBSERVATION_PERIOD row spanning the beneficiary's first
    through last visit date.

    Beneficiaries with no recorded visit activity produce no row.
    """
    if not beneficiary.visit_dates:
        return
    obs_period_fd = file_control.get_Descriptor('observation_period')
    visit_days = beneficiary.visit_dates.keys()
    row = [
        '{0}'.format(table_ids.last_observation_period_id),
        '{0}'.format(beneficiary.person_id),
        '{0}'.format(min(visit_days)),                          # observation_period_start_date
        '{0}'.format(max(visit_days)),                          # observation_period_end_date
        '{0}'.format(OMOP_CONSTANTS.OBS_PERIOD_ENROLLED_INSURANCE),
    ]
    obs_period_fd.write(','.join(row) + '\n')
    obs_period_fd.increment_recs_written(1)
    table_ids.last_observation_period_id += 1
# -----------------------------------
# Death Record
# -----------------------------------
def write_death_records(death_fd, beneficiary, death_type_concept_id, cause_source_concept_id):
    """Write a DEATH row when the beneficiary's latest yearly data carries a
    death date; otherwise write nothing."""
    yd = beneficiary.LatestYearData()
    # guard: no yearly data, or no BENE_DEATH_DT recorded -> nothing to write
    if yd is None or yd.BENE_DEATH_DT == '':
        return
    row = [
        '{0}'.format(beneficiary.person_id),
        '{0}'.format(get_date_YYYY_MM_DD(yd.BENE_DEATH_DT)),
        '{0}'.format(death_type_concept_id),
        '',                                     # cause_concept_id
        '',                                     # cause_source_value
        '{0}'.format(cause_source_concept_id),
    ]
    death_fd.write(','.join(row) + '\n')
    death_fd.increment_recs_written(1)
# -----------------------------------
# Drug Exposure
# -----------------------------------
def write_drug_exposure(drug_exp_fd, person_id, drug_concept_id, start_date, drug_type_concept_id,
                    quantity, days_supply, drug_source_concept_id, drug_source_value, provider_id, visit_occurrence_id):
    """Append one DRUG_EXPOSURE row and advance the shared id counter.

    `quantity` and `days_supply` may be None, in which case those columns
    are emitted empty.  All other unsourced columns are left blank.
    """
    qty_field = '' if quantity is None else '{0}'.format(float(quantity))
    days_field = '' if days_supply is None else '{0}'.format(days_supply)
    row = [
        '{0}'.format(table_ids.last_drug_exposure_id),      # drug_exposure_id
        '{0}'.format(person_id),
        '{0}'.format(drug_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(start_date)),      # drug_exposure_start_date
        '',                                                 # drug_exposure_end_date
        '{0}'.format(drug_type_concept_id),
        '',                                                 # stop_reason
        '',                                                 # refills
        qty_field,                                          # quantity
        days_field,                                         # days_supply
        '',                                                 # sig
        '',                                                 # route_concept_id
        '',                                                 # effective_drug_dose
        '',                                                 # dose_unit_concept_id
        '',                                                 # lot_number
        '{0}'.format(provider_id),                          # provider_id
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(drug_source_value),
        '{0}'.format(drug_source_concept_id),
        '',                                                 # route_source_value
        '',                                                 # dose_unit_source_value
    ]
    drug_exp_fd.write(','.join(row) + '\n')
    drug_exp_fd.increment_recs_written(1)
    table_ids.last_drug_exposure_id += 1
# -----------------------------------
# Device Exposure
# -----------------------------------
def write_device_exposure(device_fd, person_id, device_concept_id, start_date, end_date, device_type_concept_id,
                    device_source_value, device_source_concept_id, provider_id, visit_occurrence_id):
    """Append one DEVICE_EXPOSURE row and advance the shared id counter."""
    row = [
        '{0}'.format(table_ids.last_device_exposure_id),
        '{0}'.format(person_id),
        '{0}'.format(device_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(start_date)),
        '{0}'.format(get_date_YYYY_MM_DD(end_date)),
        '{0}'.format(device_type_concept_id),
        '',                                             # unique_device_id
        '',                                             # quantity
        '{0}'.format(provider_id),                      # provider_id
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(device_source_value),
        '{0}'.format(device_source_concept_id),
    ]
    device_fd.write(','.join(row) + '\n')
    device_fd.increment_recs_written(1)
    table_ids.last_device_exposure_id += 1
# -----------------------------------
# Prescription Drug File -> Drug Exposure; Drug Cost
# -----------------------------------
def write_drug_records(beneficiary):
    """Convert the beneficiary's prescription drug claims into DRUG_EXPOSURE
    and DRUG_COST rows.

    For each prescription record with a service date, the NDC code is looked
    up in source_code_concept_dict: mapped codes produce one DRUG_EXPOSURE
    per target concept; unmapped codes are logged and written with concept
    ids of 0.  Exactly one DRUG_COST row is written per claim regardless of
    how many exposures the mapping produced.
    """
    drug_exp_fd = file_control.get_Descriptor('drug_exposure')
    drug_cost_fd = file_control.get_Descriptor('drug_cost')
    for raw_rec in beneficiary.prescription_records:
        rec = PrescriptionDrug(raw_rec)
        # skip claims with no service date -- no usable exposure start date
        if rec.SRVC_DT == '':
            continue
        ndc_code = rec.PROD_SRVC_ID
        if (OMOP_CONSTANTS.NDC_VOCABULARY_ID,ndc_code) in source_code_concept_dict:
            #In practice we do not see multiple mappings of drugs, but in principle it could happen
            for sccd in source_code_concept_dict[OMOP_CONSTANTS.NDC_VOCABULARY_ID,ndc_code]:
                drug_source_concept_id = sccd.source_concept_id
                drug_concept_id = sccd.target_concept_id
                write_drug_exposure(drug_exp_fd, beneficiary.person_id,
                                drug_concept_id=drug_concept_id,
                                start_date=rec.SRVC_DT,
                                drug_type_concept_id=OMOP_CONSTANTS.DRUG_TYPE_PRESCRIPTION,
                                quantity=rec.QTY_DSPNSD_NUM,
                                days_supply=rec.DAYS_SUPLY_NUM,
                                drug_source_concept_id=drug_source_concept_id,
                                drug_source_value=ndc_code,
                                provider_id="",
                                visit_occurrence_id="")
        else:
            #These are for any NDC codes not in CONCEPT.csv
            dline = 'DrugRecords--- ' + 'Unmapped NDC code: ' + str(ndc_code) + ' DESYNPUF_ID: ' + rec.DESYNPUF_ID + '\n'
            unmapped_log.write(dline)
            # still write the exposure, but with concept ids of 0 (unmapped)
            write_drug_exposure(drug_exp_fd, beneficiary.person_id,
                            drug_concept_id="0",
                            start_date=rec.SRVC_DT,
                            drug_type_concept_id=OMOP_CONSTANTS.DRUG_TYPE_PRESCRIPTION,
                            quantity=rec.QTY_DSPNSD_NUM,
                            days_supply=rec.DAYS_SUPLY_NUM,
                            drug_source_concept_id="0",
                            drug_source_value=ndc_code,
                            provider_id="",
                            visit_occurrence_id="")
        #----------------------
        # drug cost -- only written once, even if (doesn't happen now) NDC code maps to multiple RxNorm drugs
        #----------------------
        current_drug_exposure_id = table_ids.last_drug_exposure_id - 1  #subtracted 1 as drug_exposure function added 1 to last_drug_exposure_id
        drug_cost_fd.write('{0},'.format(table_ids.last_drug_cost_id))
        drug_cost_fd.write('{0},'.format(current_drug_exposure_id))
        drug_cost_fd.write('{0},'.format(OMOP_CONSTANTS.CURRENCY_US_DOLLAR))
        drug_cost_fd.write(',')                              # paid_copay
        drug_cost_fd.write('{0},'.format(rec.PTNT_PAY_AMT))  # paid_coinsurance
        drug_cost_fd.write(',')                              # paid_toward_deductible
        drug_cost_fd.write(',')                              # paid_by_payer
        drug_cost_fd.write(',')                              # paid_by_coordination_of_benefits
        drug_cost_fd.write('{0},'.format(rec.PTNT_PAY_AMT))  # total_out_of_pocket #
        drug_cost_fd.write('{0},'.format(rec.TOT_RX_CST_AMT))  # total_paid #
        drug_cost_fd.write(',')                              # ingredient_cost
        drug_cost_fd.write(',')                              # dispensing_fee
        drug_cost_fd.write(',')                              # average_wholesale_price
        #drug_cost_fd.write('')                              # payer_plan_period_id ##### At moment we do not have payer_plan_period implemented, as we have no payer plan information.
        drug_cost_fd.write('\n')
        drug_cost_fd.increment_recs_written(1)
        table_ids.last_drug_cost_id += 1
# -----------------------------------
# Provider file
# -----------------------------------
def write_provider_record(provider_fd, npi, provider_id, care_site_id, provider_source_value):
    """Write one PROVIDER row per NPI, skipping NPIs already written.

    Written/unwritten status is tracked in npi_provider_id (index 1 of the
    stored pair: 0 = not yet written, 1 = written).
    """
    if not provider_id:
        return
    if npi_provider_id[npi][1] != 0:
        return  # this NPI's provider row was already emitted
    row = [
        '{0}'.format(provider_id),
        '',                                     # provider_name
        '{0}'.format(npi),
        '',                                     # dea
        '',                                     # unlabeled in original -- verify column
        '{0}'.format(care_site_id),
        '',                                     # year_of_birth
        '',                                     # gender_concept_id
        '{0}'.format(provider_source_value),    # provider_source_value
        '',                                     # specialty_source_value
        '',                                     # specialty_source_concept_id
        '',                                     # gender_source_value
        '',                                     # gender_source_concept_id
    ]
    provider_fd.write(','.join(row) + '\n')
    provider_fd.increment_recs_written(1)
    npi_provider_id[npi] = [npi_provider_id[npi][0], 1]  # mark this NPI as written
# -----------------------------------
# Condition Occurence file
# - Added provider_id
# -----------------------------------
def write_condition_occurrence(cond_occur_fd, person_id, condition_concept_id,
                    from_date, thru_date, condition_type_concept_id, provider_id,
                    condition_source_value, condition_source_concept_id, visit_occurrence_id):
    """Append one CONDITION_OCCURRENCE row and advance the shared id counter."""
    row = [
        '{0}'.format(table_ids.last_condition_occurrence_id),
        '{0}'.format(person_id),
        '{0}'.format(condition_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(from_date)),
        '{0}'.format(get_date_YYYY_MM_DD(thru_date)),
        '{0}'.format(condition_type_concept_id),
        '',                                         # stop_reason
        '{0}'.format(provider_id),                  # provider_id
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(condition_source_value),
        '{0}'.format(condition_source_concept_id),
    ]
    cond_occur_fd.write(','.join(row) + '\n')
    cond_occur_fd.increment_recs_written(1)
    table_ids.last_condition_occurrence_id += 1
# -----------------------------------
# - Added this new function to
# create Visit Occurence file
# -----------------------------------
def write_visit_occurrence(visit_occur_fd, person_id, visit_concept_id, visit_occurrence_id, care_site_id, visit_source_concept_id,
                    from_date, thru_date, visit_type_concept_id, provider_id, visit_source_value):
    """Append one VISIT_OCCURRENCE row.

    Note: visit_source_concept_id is accepted for interface compatibility but
    is not written -- the column was commented out in the original layout.
    """
    row = [
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(person_id),
        '{0}'.format(visit_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(from_date)),
        '',                                         # visit_start_time
        '{0}'.format(get_date_YYYY_MM_DD(thru_date)),
        '',                                         # visit_end_time
        '{0}'.format(visit_type_concept_id),
        '{0}'.format(provider_id),                  # provider_id
        '{0}'.format(care_site_id),                 # care_site_id
        '{0}'.format(visit_source_value),
        '',                                         # visit_source_concept_id (not populated)
    ]
    visit_occur_fd.write(','.join(row) + '\n')
    visit_occur_fd.increment_recs_written(1)
# -----------------------------------
# Procedure Occurence file
# -----------------------------------
def write_procedure_occurrence(proc_occur_fd, person_id, procedure_concept_id,
                    from_date, procedure_type_concept_id, provider_id, modifier_concept_id,
                    procedure_source_value, procedure_source_concept_id, visit_occurrence_id):
    """Append one PROCEDURE_OCCURRENCE row and advance the shared id counter.

    NOTE(review): modifier_concept_id is accepted but the modifier column is
    written empty, matching the original -- confirm whether it should be
    populated.
    """
    row = [
        '{0}'.format(table_ids.last_procedure_occurrence_id),
        '{0}'.format(person_id),
        '{0}'.format(procedure_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(from_date)),   # procedure_date
        '{0}'.format(procedure_type_concept_id),
        '',                                             # modifier_concept_id
        '',                                             # quantity
        '{0}'.format(provider_id),                      # provider_id
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(procedure_source_value),
        '{0}'.format(procedure_source_concept_id),
        '',                                             # qualifier_source_value
    ]
    proc_occur_fd.write(','.join(row) + '\n')
    proc_occur_fd.increment_recs_written(1)
    table_ids.last_procedure_occurrence_id += 1
# -----------------------------------
# Measurement file
# -----------------------------------
def write_measurement(measurement_fd, person_id, measurement_concept_id,
                    measurement_date, measurement_type_concept_id,
                    measurement_source_value, measurement_source_concept_id, provider_id, visit_occurrence_id):
    """Append one MEASUREMENT row and advance the shared id counter.

    value_as_concept_id is hard-coded to 0, as in the original; all other
    unsourced columns are left empty.
    """
    row = [
        '{0}'.format(table_ids.last_measurement_id),
        '{0}'.format(person_id),
        '{0}'.format(measurement_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(measurement_date)),
        '',                                         # measurement_time
        '{0}'.format(measurement_type_concept_id),
        '',                                         # operator_concept_id
        '',                                         # value_as_number
        '0',                                        # value_as_concept_id
        '',                                         # unit_concept_id
        '',                                         # range_low
        '',                                         # range_high
        '{0}'.format(provider_id),                  # provider_id
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(measurement_source_value),
        '{0}'.format(measurement_source_concept_id),
        '',                                         # unit_source_value
        '',                                         # value_source_value
    ]
    measurement_fd.write(','.join(row) + '\n')
    measurement_fd.increment_recs_written(1)
    table_ids.last_measurement_id += 1
# -----------------------------------
# Observation file
# -----------------------------------
def write_observation(observation_fd, person_id, observation_concept_id, provider_id,
                    observation_date, observation_type_concept_id,
                    observation_source_value, observation_source_concept_id, visit_occurrence_id):
    """Append one OBSERVATION row and advance the shared id counter.

    value_as_concept_id is hard-coded to 0, as in the original; all other
    unsourced columns are left empty.
    """
    row = [
        '{0}'.format(table_ids.last_observation_id),
        '{0}'.format(person_id),
        '{0}'.format(observation_concept_id),
        '{0}'.format(get_date_YYYY_MM_DD(observation_date)),
        '',                                         # observation_time
        '{0}'.format(observation_type_concept_id),
        '',                                         # value_as_number
        '',                                         # value_as_string
        '0',                                        # value_as_concept_id
        '',                                         # qualifier_concept_id
        '',                                         # unit_concept_id
        '{0}'.format(provider_id),                  # provider_id
        '{0}'.format(visit_occurrence_id),
        '{0}'.format(observation_source_value),
        '{0}'.format(observation_source_concept_id),
        '',                                         # unit_source_value
        '',                                         # qualifier_source_value
    ]
    observation_fd.write(','.join(row) + '\n')
    observation_fd.increment_recs_written(1)
    table_ids.last_observation_id += 1
# -----------------------------------
# Write to Care Site file
# -----------------------------------
def write_care_site(care_site_fd, care_site_id, place_of_service_concept_id, care_site_source_value, place_of_service_source_value):
    """Write one CARE_SITE row per source value, skipping ones already written.

    Written/unwritten status is tracked in provider_id_care_site_id (index 1
    of the stored pair: 0 = not yet written, 1 = written).
    """
    if not care_site_id:
        return
    if provider_id_care_site_id[care_site_source_value][1] != 0:
        return  # this care site was already emitted
    row = [
        '{0}'.format(care_site_id),
        '',                                             # care_site_name
        '{0}'.format(place_of_service_concept_id),
        '',                                             # location_id
        '{0}'.format(care_site_source_value),
        '{0}'.format(place_of_service_source_value),
    ]
    care_site_fd.write(','.join(row) + '\n')
    care_site_fd.increment_recs_written(1)
    provider_id_care_site_id[care_site_source_value] = [provider_id_care_site_id[care_site_source_value][0], 1]  # mark as written
# -----------------------------------
# From Inpatient Records:
# --> Visit Occurrence
# --> Visit Cost
# --> Procedure Occurrence
# --> Drug Exposure
# --> Device Exposure
# --> Condition Occurrence
# --> Measurement Occurrence
# --> Observation
# --> Care Site
# --> Provider
# -----------------------------------
def process_inpatient_records(beneficiary):
    """Fan each inpatient claim out into the OMOP CDM tables.

    For every inpatient claim with a from-date this: (1) writes the care site
    and providers the first time they appear, (2) maps each ICD-9 diagnosis,
    ICD-9 procedure, and HCPCS code through source_code_concept_dict and
    writes it to the destination table the mapping designates (procedure,
    condition, drug, measurement, observation, or device), logging any
    unmapped codes, and (3) writes one VISIT_OCCURRENCE row per unique visit
    key (person + from-date + thru-date + institution).
    """
    drug_exp_fd = file_control.get_Descriptor('drug_exposure')
    drug_cost_fd = file_control.get_Descriptor('drug_cost')
    proc_occur_fd = file_control.get_Descriptor('procedure_occurrence')
    proc_cost_fd = file_control.get_Descriptor('procedure_cost')
    cond_occur_fd = file_control.get_Descriptor('condition_occurrence')
    death_fd = file_control.get_Descriptor('death')
    care_site_fd = file_control.get_Descriptor('care_site')
    provider_fd = file_control.get_Descriptor('provider')
    measurement_fd = file_control.get_Descriptor('measurement_occurrence')
    observation_fd = file_control.get_Descriptor('observation')
    device_fd = file_control.get_Descriptor('device_exposure')
    visit_occur_fd = file_control.get_Descriptor('visit_occurrence')
    visit_cost_fd = file_control.get_Descriptor('visit_cost')
    # location_fd = file_control.get_Descriptor('location')
    for raw_rec in beneficiary.inpatient_records:
        rec = InpatientClaim(raw_rec)
        # skip claims with no from-date -- nothing downstream can be dated
        if rec.CLM_FROM_DT == '':
            continue
        # initialize both care_site_id and provider_id to null as some institution might not have PRVDR_NUM and some NPI might be null.
        care_site_id = ""
        provider_id = ""
        # --get care_site_id (a unique number generated by the program) for the given institution (PRVDR_NUM)
        if rec.PRVDR_NUM != '':
            provider_number = rec.PRVDR_NUM
            care_site_id = get_CareSite(provider_number)
            write_care_site(care_site_fd, care_site_id,
                            place_of_service_concept_id=OMOP_CONSTANTS.INPATIENT_PLACE_OF_SERVICE,
                            care_site_source_value=rec.PRVDR_NUM,
                            place_of_service_source_value=OMOP_CONSTANTS.INPATIENT_PLACE_OF_SERVICE_SOURCE)
        #-- get provider_id (a unique number generated by the program) for the given NPI. Each NPI will have its own provider_id
        # NOTE(review): provider_id ends up holding the id of the *last*
        # non-empty NPI of (attending, operating, other) -- all later rows in
        # this iteration use that provider; confirm this is intended.
        for npi in (rec.AT_PHYSN_NPI, rec.OP_PHYSN_NPI, rec.OT_PHYSN_NPI):
            if npi != '':
                provider_id = get_Provider(npi)
                write_provider_record(provider_fd, npi, provider_id, care_site_id, rec.AT_PHYSN_NPI)
        #-- get visit id. Person id + CLM_FROM_DT + CLM_THRU_DT + institution number(PRVDR_NUM) make the key for a particular visit
        current_visit_id = visit_occurrence_ids[rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.PRVDR_NUM]
        # walk every code on the claim: ICD-9 diagnoses, ICD-9 procedures
        # (both under the ICD-9 vocabulary id), then HCPCS codes
        for (vocab,code) in ([(OMOP_CONSTANTS.ICD_9_VOCAB_ID, x) for x in rec.ICD9_DGNS_CD_list] +
                             [(OMOP_CONSTANTS.ICD_9_VOCAB_ID,x) for x in rec.ICD9_PRCDR_CD_list] +
                             [(OMOP_CONSTANTS.HCPCS_VOCABULARY_ID, x) for x in rec.HCPCS_CD_list]):
            if rec.CLM_FROM_DT != '':
                if (vocab,code) in source_code_concept_dict:
                    # a source code may map to several target concepts; write one row per mapping
                    for sccd in source_code_concept_dict[vocab,code]:
                        target_concept_id = sccd.target_concept_id
                        source_concept_id = sccd.source_concept_id
                        destination_file = sccd.destination_file
                        # dispatch on the table the mapping designates
                        if destination_file == DESTINATION_FILE_PROCEDURE:
                            write_procedure_occurrence(proc_occur_fd, beneficiary.person_id,
                                               procedure_concept_id=target_concept_id,
                                               from_date=rec.CLM_FROM_DT,
                                               procedure_type_concept_id=OMOP_CONSTANTS.INPAT_PROCEDURE_1ST_POSITION,
                                               procedure_source_value=code,
                                               procedure_source_concept_id=source_concept_id,
                                               provider_id=provider_id,
                                               modifier_concept_id=0,
                                               visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_CONDITION:
                            write_condition_occurrence(cond_occur_fd,beneficiary.person_id,
                                               condition_concept_id=target_concept_id,
                                               from_date=rec.CLM_FROM_DT, thru_date=rec.CLM_THRU_DT,
                                               condition_type_concept_id=OMOP_CONSTANTS.INPAT_CONDITION_1ST_POSITION,
                                               condition_source_value=code,
                                               condition_source_concept_id=source_concept_id,
                                               provider_id=provider_id,
                                               visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_DRUG:
                            write_drug_exposure(drug_exp_fd, beneficiary.person_id,
                                                drug_concept_id=target_concept_id,
                                                start_date=rec.CLM_FROM_DT,
                                                drug_type_concept_id=OMOP_CONSTANTS.DRUG_TYPE_PRESCRIPTION,
                                                quantity=None,
                                                days_supply=None,
                                                drug_source_value=code,
                                                drug_source_concept_id=source_concept_id,
                                                provider_id=provider_id,
                                                visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_MEASUREMENT:
                            write_measurement(measurement_fd, beneficiary.person_id,
                                              measurement_concept_id=target_concept_id,
                                              measurement_date=rec.CLM_FROM_DT,
                                              measurement_type_concept_id=OMOP_CONSTANTS.MEASUREMENT_DERIVED_VALUE,
                                              measurement_source_value=code,
                                              measurement_source_concept_id=source_concept_id,
                                              provider_id=provider_id,
                                              visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_OBSERVATION:
                            write_observation(observation_fd, beneficiary.person_id,
                                              observation_concept_id=target_concept_id,
                                              observation_date=rec.CLM_FROM_DT,
                                              observation_type_concept_id=OMOP_CONSTANTS.OBSERVATION_CHIEF_COMPLAINT,
                                              observation_source_value=code,
                                              observation_source_concept_id=source_concept_id,
                                              provider_id=provider_id,
                                              visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_DEVICE:
                            write_device_exposure(device_fd, beneficiary.person_id,
                                              device_concept_id=target_concept_id,
                                              start_date=rec.CLM_FROM_DT,
                                              end_date=rec.CLM_THRU_DT,
                                              device_type_concept_id=OMOP_CONSTANTS.DEVICE_INFERRED_PROCEDURE_CLAIM,
                                              device_source_value=code,
                                              device_source_concept_id=source_concept_id,
                                              provider_id=provider_id,
                                              visit_occurrence_id=current_visit_id)
                    #-- Write each unique visit to visit_occurrence file.
                    # NOTE(review): source_concept_id here is whatever the last
                    # iteration of the for-sccd loop left behind (and would be
                    # undefined if the mapping list were empty); the value is
                    # not written anyway (column unused) -- confirm intent.
                    if current_visit_id not in visit_id_list:
                        write_visit_occurrence(visit_occur_fd,beneficiary.person_id,
                                           visit_concept_id=OMOP_CONSTANTS.INPAT_VISIT_CONCEPT_ID,
                                           from_date=rec.CLM_FROM_DT, thru_date=rec.CLM_THRU_DT,
                                           visit_type_concept_id=OMOP_CONSTANTS.INPAT_VISIT_1ST_POSITION,
                                           visit_source_value=rec.CLM_ID,
                                           visit_source_concept_id=source_concept_id,
                                           care_site_id=care_site_id,
                                           provider_id=provider_id,
                                           visit_occurrence_id=current_visit_id)
                        visit_id_list.add(current_visit_id)
                else:
                    # code has no mapping in CONCEPT -- log it for later review
                    dfile = 'Inpatient--- unmapped ' + str(vocab) + ' code: ' + str(code) + ' DESYNPUF_ID: ' + rec.DESYNPUF_ID + '\n'
                    unmapped_log.write(dfile)
    #-- care site / provider
# -----------------------------------
# From Outpatient Records:
# --> Visit Occurrence
# --> Visit Cost
# --> Procedure Occurrence
# --> Drug Exposure
# --> Device Exposure
# --> Device Exposure Cost
# --> Condition Occurrence
# --> Measurement Occurrence
# --> Observation
# --> Care Site
# --> Provider
# -----------------------------------
def process_outpatient_records(beneficiary):
    """Emit OMOP CDM rows derived from one beneficiary's outpatient claims.

    Every mapped source code on each claim is routed, per its mapping's
    destination_file, to one of: procedure_occurrence, condition_occurrence,
    drug_exposure, measurement, observation or device_exposure.  One
    visit_occurrence row is written per unique visit id; unmapped codes are
    logged to the module-level ``unmapped_log``.

    Relies on module globals: ``file_control``, ``visit_occurrence_ids``,
    ``visit_id_list``, ``source_code_concept_dict``, ``get_CareSite``,
    ``get_Provider`` and the ``write_*`` record emitters.
    """
    drug_exp_fd = file_control.get_Descriptor('drug_exposure')
    drug_cost_fd = file_control.get_Descriptor('drug_cost')
    proc_occur_fd = file_control.get_Descriptor('procedure_occurrence')
    proc_cost_fd = file_control.get_Descriptor('procedure_cost')
    cond_occur_fd = file_control.get_Descriptor('condition_occurrence')
    death_fd = file_control.get_Descriptor('death')
    care_site_fd = file_control.get_Descriptor('care_site')
    provider_fd = file_control.get_Descriptor('provider')
    measurement_fd = file_control.get_Descriptor('measurement_occurrence')
    observation_fd = file_control.get_Descriptor('observation')
    device_fd = file_control.get_Descriptor('device_exposure')
    visit_occur_fd = file_control.get_Descriptor('visit_occurrence')
    visit_cost_fd = file_control.get_Descriptor('visit_cost')
    for raw_rec in beneficiary.outpatient_records:
        rec = OutpatientClaim(raw_rec)
        # A claim with no from-date cannot be placed on the timeline; skip it.
        if rec.CLM_FROM_DT == '':
            continue
        # initialize both care_site_id and provider_id to null as some institution might not have PRVDR_NUM and some NPI might be null.
        care_site_id = ""
        provider_id = ""
        #-- get care_site_id (a unique number generated by the program) for the given institution (PRVDR_NUM)
        if rec.PRVDR_NUM != '':
            provider_number = rec.PRVDR_NUM
            care_site_id = get_CareSite(provider_number)
            write_care_site(care_site_fd, care_site_id,
                            place_of_service_concept_id=OMOP_CONSTANTS.OUTPATIENT_PLACE_OF_SERVICE,
                            care_site_source_value=rec.PRVDR_NUM,
                            place_of_service_source_value=OMOP_CONSTANTS.OUTPATIENT_PLACE_OF_SERVICE_SOURCE)
        #-- get provider_id (a unique number generated by the program) for the given NPI. Each NPI will have its own provider_id
        # NOTE(review): provider_id ends up holding the id of the LAST non-empty
        # NPI in (attending, operating, other) — presumably intended precedence;
        # confirm.  Also note the provider record always carries AT_PHYSN_NPI as
        # its final argument regardless of which NPI is being written.
        for npi in (rec.AT_PHYSN_NPI, rec.OP_PHYSN_NPI, rec.OT_PHYSN_NPI):
            if npi != '':
                provider_id = get_Provider(npi)
                write_provider_record(provider_fd, npi, provider_id, care_site_id, rec.AT_PHYSN_NPI)
        #-- get visit id. Person id + CLM_FROM_DT + CLM_THRU_DT + institution number(PRVDR_NUM) make the key for a particular visit
        current_visit_id = visit_occurrence_ids[rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.PRVDR_NUM]
        # Iterate every source code on the claim: admitting diagnosis (if any),
        # then ICD-9 diagnoses, ICD-9 procedures, and HCPCS codes.
        for (vocab,code) in ( ([] if rec.ADMTNG_ICD9_DGNS_CD == "" else [(OMOP_CONSTANTS.ICD_9_VOCAB_ID,rec.ADMTNG_ICD9_DGNS_CD)]) +
                              [(OMOP_CONSTANTS.ICD_9_VOCAB_ID,x) for x in rec.ICD9_DGNS_CD_list] +
                              [(OMOP_CONSTANTS.ICD_9_VOCAB_ID,x) for x in rec.ICD9_PRCDR_CD_list] +
                              [(OMOP_CONSTANTS.HCPCS_VOCABULARY_ID,x) for x in rec.HCPCS_CD_list]):
            if rec.CLM_FROM_DT != '':
                if (vocab,code) in source_code_concept_dict:
                    # A single source code can map to several target concepts,
                    # each with its own destination table.
                    for sccd in source_code_concept_dict[vocab,code]:
                        target_concept_id = sccd.target_concept_id
                        source_concept_id = sccd.source_concept_id
                        destination_file = sccd.destination_file
                        if destination_file == DESTINATION_FILE_PROCEDURE:
                            write_procedure_occurrence(proc_occur_fd, beneficiary.person_id,
                                            procedure_concept_id=target_concept_id,
                                            from_date=rec.CLM_FROM_DT,
                                            procedure_type_concept_id=OMOP_CONSTANTS.OUTPAT_PROCEDURE_1ST_POSITION,
                                            procedure_source_value=code,
                                            procedure_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            modifier_concept_id=0,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_CONDITION:
                            write_condition_occurrence(cond_occur_fd,beneficiary.person_id,
                                            condition_concept_id=target_concept_id,
                                            from_date=rec.CLM_FROM_DT, thru_date=rec.CLM_THRU_DT,
                                            condition_type_concept_id=OMOP_CONSTANTS.OUTPAT_CONDITION_1ST_POSITION,
                                            condition_source_value=code,
                                            condition_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_DRUG:
                            write_drug_exposure(drug_exp_fd, beneficiary.person_id,
                                            drug_concept_id=target_concept_id,
                                            start_date=rec.CLM_FROM_DT,
                                            drug_type_concept_id=OMOP_CONSTANTS.DRUG_TYPE_PRESCRIPTION,
                                            quantity=None,
                                            days_supply=None,
                                            drug_source_value=code,
                                            drug_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_MEASUREMENT:
                            write_measurement(measurement_fd, beneficiary.person_id,
                                            measurement_concept_id=target_concept_id,
                                            measurement_date=rec.CLM_FROM_DT,
                                            measurement_type_concept_id=OMOP_CONSTANTS.MEASUREMENT_DERIVED_VALUE,
                                            measurement_source_value=code,
                                            measurement_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_OBSERVATION:
                            write_observation(observation_fd, beneficiary.person_id,
                                            observation_concept_id=target_concept_id,
                                            observation_date=rec.CLM_FROM_DT,
                                            observation_type_concept_id=OMOP_CONSTANTS.OBSERVATION_CHIEF_COMPLAINT,
                                            observation_source_value=code,
                                            observation_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_DEVICE:
                            write_device_exposure(device_fd, beneficiary.person_id,
                                            device_concept_id=target_concept_id,
                                            start_date=rec.CLM_FROM_DT,
                                            end_date=rec.CLM_THRU_DT,
                                            device_type_concept_id=OMOP_CONSTANTS.DEVICE_INFERRED_PROCEDURE_CLAIM,
                                            device_source_value=code,
                                            device_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        #-- Write each unique visit to visit_occurrence file.
                        # NOTE(review): visit_source_concept_id reuses the
                        # current code mapping's source_concept_id — looks
                        # accidental (the visit is keyed on the claim, not the
                        # code); confirm against the ETL specification.
                        if current_visit_id not in visit_id_list:
                            write_visit_occurrence(visit_occur_fd,beneficiary.person_id,
                                            visit_concept_id=OMOP_CONSTANTS.OUTPAT_VISIT_CONCEPT_ID,
                                            from_date=rec.CLM_FROM_DT, thru_date=rec.CLM_THRU_DT,
                                            visit_type_concept_id=OMOP_CONSTANTS.OUTPAT_VISIT_1ST_POSITION,
                                            visit_source_value=rec.CLM_ID,
                                            visit_source_concept_id=source_concept_id,
                                            care_site_id=care_site_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                            visit_id_list.add(current_visit_id)
                else:
                    # No mapping for this (vocabulary, code) pair — record it.
                    dfile = 'Outpatient--- unmapped ' + str(vocab) + ' code: ' + str(code) + ' DESYNPUF_ID: ' + rec.DESYNPUF_ID + '\n'
                    unmapped_log.write(dfile)
# -----------------------------------
# From Carrier Claims Records:
# --> Visit Occurrence
# --> Visit Cost
# --> Procedure Occurrence
# --> Drug Exposure
# --> Device Exposure
# --> Device Exposure Cost
# --> Condition Occurrence
# --> Measurement Occurrence
# --> Observation
# --> Care Site
# --> Provider
# -----------------------------------
def process_carrier_records(beneficiary):
    """Emit OMOP CDM rows derived from one beneficiary's carrier claims.

    Mirrors process_outpatient_records(): each mapped source code is routed by
    its mapping's destination_file to the matching CDM output file, one
    visit_occurrence row is written per unique visit id, and unmapped codes go
    to ``unmapped_log``.  Additionally, procedure mappings emit one
    procedure_cost row per claim line with a non-zero amount.

    Relies on module globals: ``file_control``, ``visit_occurrence_ids``,
    ``visit_id_list``, ``source_code_concept_dict``, ``table_ids``,
    ``get_CareSite``, ``get_Provider`` and the ``write_*`` record emitters.
    """
    drug_exp_fd = file_control.get_Descriptor('drug_exposure')
    drug_cost_fd = file_control.get_Descriptor('drug_cost')
    proc_occur_fd = file_control.get_Descriptor('procedure_occurrence')
    proc_cost_fd = file_control.get_Descriptor('procedure_cost')
    cond_occur_fd = file_control.get_Descriptor('condition_occurrence')
    death_fd = file_control.get_Descriptor('death')
    care_site_fd = file_control.get_Descriptor('care_site')
    provider_fd = file_control.get_Descriptor('provider')
    measurement_fd = file_control.get_Descriptor('measurement_occurrence')
    observation_fd = file_control.get_Descriptor('observation')
    device_fd = file_control.get_Descriptor('device_exposure')
    visit_occur_fd = file_control.get_Descriptor('visit_occurrence')
    visit_cost_fd = file_control.get_Descriptor('visit_cost')
    for raw_rec in beneficiary.carrier_records:
        rec = CarrierClaim(raw_rec)
        # A claim with no from-date cannot be placed on the timeline; skip it.
        if rec.CLM_FROM_DT == '':
            continue
        # initialize both care_site_id and provider_id to null as some institution might not have PRVDR_NUM and some NPI might be null.
        care_site_id = ""
        provider_id = ""
        #-- get care_site_id (a unique number generated by the program) for the given TAX_NUM
        for cc_line in rec.CarrierClaimLine_list:
            # initialize both care_site_id and provider_id to null as some institution might not have PRVDR_NUM and some NPI might be null.
            care_site_id = ''
            provider_id = ''
            if cc_line.TAX_NUM != '':
                # save_TAX_NUM keeps the last non-empty TAX_NUM seen on a line.
                save_TAX_NUM = cc_line.TAX_NUM
                care_site_id = get_CareSite(cc_line.TAX_NUM)
                write_care_site(care_site_fd, care_site_id,
                                place_of_service_concept_id=OMOP_CONSTANTS.CARRIER_CLAIMS_PLACE_OF_SERVICE,
                                care_site_source_value=cc_line.TAX_NUM,
                                place_of_service_source_value=OMOP_CONSTANTS.CARRIER_CLAIMS_PLACE_OF_SERVICE_SOURCE)
            #-- get provider_id (a unique number generated by the program) for the given NPI. Each NPI will have its own provider_id
            if cc_line.PRF_PHYSN_NPI != '':
                npi = cc_line.PRF_PHYSN_NPI
                provider_id = get_Provider(npi)
                write_provider_record(provider_fd, npi, provider_id, care_site_id, cc_line.PRF_PHYSN_NPI)
        #-- get visit id. Person id + CLM_FROM_DT + CLM_THRU_DT + TAX_NUM make the key for a particular visit
        current_visit_id = visit_occurrence_ids[rec.DESYNPUF_ID,rec.CLM_FROM_DT,rec.CLM_THRU_DT,rec.TAX_NUM]
        # Iterate every source code on the claim: ICD-9 diagnoses, HCPCS codes,
        # and per-line ICD-9 diagnoses.
        for (vocab,code) in ([(OMOP_CONSTANTS.ICD_9_VOCAB_ID,x) for x in rec.ICD9_DGNS_CD_list] +
                             [(OMOP_CONSTANTS.HCPCS_VOCABULARY_ID, x) for x in rec.HCPCS_CD_list] +
                             [(OMOP_CONSTANTS.ICD_9_VOCAB_ID, x) for x in rec.LINE_ICD9_DGNS_CD_list]):
            if rec.CLM_FROM_DT != '':
                if (vocab,code) in source_code_concept_dict:
                    # A single source code can map to several target concepts,
                    # each with its own destination table.
                    for sccd in source_code_concept_dict[vocab,code]:
                        target_concept_id = sccd.target_concept_id
                        source_concept_id = sccd.source_concept_id
                        destination_file = sccd.destination_file
                        if destination_file == DESTINATION_FILE_PROCEDURE:
                            write_procedure_occurrence(proc_occur_fd, beneficiary.person_id,
                                            procedure_concept_id=target_concept_id,
                                            from_date=rec.CLM_FROM_DT,
                                            procedure_type_concept_id=OMOP_CONSTANTS.OUTPAT_PROCEDURE_1ST_POSITION,
                                            procedure_source_value=code,
                                            procedure_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            modifier_concept_id=0,
                                            visit_occurrence_id=current_visit_id)
                            #-- procedure cost. If there is an entry in procedure occurence, then only procedure cost should be updated.
                            current_procedure_occurence_id = table_ids.last_procedure_occurrence_id - 1   # after writing procedure occurence, id is increased by 1 and hence subtracted 1 to get the same id.
                            # Emit one procedure_cost CSV row per claim line
                            # that carries a non-zero amount, all pointing at
                            # the procedure_occurrence row just written.
                            for cc_line in rec.CarrierClaimLine_list:
                                if cc_line.has_nonzero_amount():
                                    proc_cost_fd.write('{0},'.format(table_ids.last_procedure_cost_id))
                                    proc_cost_fd.write('{0},'.format(current_procedure_occurence_id))
                                    proc_cost_fd.write('{0},'.format(OMOP_CONSTANTS.CURRENCY_US_DOLLAR))        # currency_concept_id
                                    proc_cost_fd.write(',')                                                     # paid_copay
                                    proc_cost_fd.write('{0},'.format(cc_line.LINE_COINSRNC_AMT))                # paid_coinsurance
                                    proc_cost_fd.write('{0},'.format(cc_line.LINE_BENE_PTB_DDCTBL_AMT))         # paid_toward_deductible
                                    proc_cost_fd.write('{0},'.format(cc_line.LINE_NCH_PMT_AMT))                 # paid_by_payer
                                    proc_cost_fd.write('{0},'.format(cc_line.LINE_BENE_PRMRY_PYR_PD_AMT))       # paid_by_coordination_benefits
                                    amt = 0
                                    # Source amounts are strings and may be
                                    # blank/non-numeric; fall back to 0.
                                    try:
                                        amt = float(cc_line.LINE_BENE_PTB_DDCTBL_AMT) + float(cc_line.LINE_COINSRNC_AMT)
                                    except:
                                        pass
                                    proc_cost_fd.write('{0:2},'.format(amt))                                    # total_out_of_pocket
                                    proc_cost_fd.write('{0},'.format(cc_line.LINE_ALOWD_CHRG_AMT))              # total_paid
                                    proc_cost_fd.write(',')                                                     # revenue_code_concept_id
                                    ##
                                    ##  need to lookup
                                    ##
                                    proc_cost_fd.write(',')      # payer_plan_period_id   Changed to space as payer_plan_period file is not created
                                    #proc_cost_fd.write('')      # revenue_code_source_value
                                    proc_cost_fd.write('\n')
                                    proc_cost_fd.increment_recs_written(1)
                                    table_ids.last_procedure_cost_id += 1
                        elif destination_file == DESTINATION_FILE_CONDITION:
                            write_condition_occurrence(cond_occur_fd,beneficiary.person_id,
                                            condition_concept_id=target_concept_id,
                                            from_date=rec.CLM_FROM_DT, thru_date=rec.CLM_THRU_DT,
                                            condition_type_concept_id=OMOP_CONSTANTS.OUTPAT_CONDITION_1ST_POSITION,
                                            condition_source_value=code,
                                            condition_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_DRUG:
                            write_drug_exposure(drug_exp_fd, beneficiary.person_id,
                                            drug_concept_id=target_concept_id,
                                            start_date=rec.CLM_FROM_DT,
                                            drug_type_concept_id=OMOP_CONSTANTS.DRUG_TYPE_PRESCRIPTION,
                                            quantity=None,
                                            days_supply=None,
                                            drug_source_value=code,
                                            drug_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_MEASUREMENT:
                            write_measurement(measurement_fd, beneficiary.person_id,
                                            measurement_concept_id=target_concept_id,
                                            measurement_date=rec.CLM_FROM_DT,
                                            measurement_type_concept_id=OMOP_CONSTANTS.MEASUREMENT_DERIVED_VALUE,
                                            measurement_source_value=code,
                                            measurement_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_OBSERVATION:
                            write_observation(observation_fd, beneficiary.person_id,
                                            observation_concept_id=target_concept_id,
                                            observation_date=rec.CLM_FROM_DT,
                                            observation_type_concept_id=OMOP_CONSTANTS.OBSERVATION_CHIEF_COMPLAINT,
                                            observation_source_value=code,
                                            observation_source_concept_id=source_concept_id,
                                            provider_id=provider_id,    #
                                            visit_occurrence_id=current_visit_id)
                        elif destination_file == DESTINATION_FILE_DEVICE:
                            write_device_exposure(device_fd, beneficiary.person_id,
                                            device_concept_id=target_concept_id,
                                            start_date=rec.CLM_FROM_DT,
                                            end_date=rec.CLM_THRU_DT,
                                            device_type_concept_id=OMOP_CONSTANTS.DEVICE_INFERRED_PROCEDURE_CLAIM,
                                            device_source_value=code,
                                            device_source_concept_id=source_concept_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                        #-- Write each unique visit to visit_occurrence file.
                        # NOTE(review): visit_source_concept_id reuses the
                        # current code mapping's source_concept_id — looks
                        # accidental (the visit is keyed on the claim, not the
                        # code); confirm against the ETL specification.
                        if current_visit_id not in visit_id_list:
                            write_visit_occurrence(visit_occur_fd,beneficiary.person_id,
                                            visit_concept_id=OMOP_CONSTANTS.CARRIER_CLAIMS_VISIT_CONCEPT_ID,
                                            from_date=rec.CLM_FROM_DT, thru_date=rec.CLM_THRU_DT,
                                            visit_type_concept_id=OMOP_CONSTANTS.CARRIER_CLAIMS_VISIT_1ST_POSITION,
                                            visit_source_value=rec.CLM_ID,
                                            visit_source_concept_id=source_concept_id,
                                            care_site_id=care_site_id,
                                            provider_id=provider_id,
                                            visit_occurrence_id=current_visit_id)
                            visit_id_list.add(current_visit_id)
                else:
                    # No mapping for this (vocabulary, code) pair — record it.
                    dfile = 'CarrierClaim--- unmapped ' + str(vocab) + ' code: ' + str(code) + ' DESYNPUF_ID: ' + rec.DESYNPUF_ID + '\n'
                    unmapped_log.write(dfile)
#---------------------------------
def write_header_records():
    """Write the CSV column-header row to every CDM v5 output file.

    The header strings below are the exact column lists of the target OMOP
    CDM v5 tables; they must stay in sync with what the corresponding
    write_* functions emit.  Uses the module-level ``file_control`` to find
    each open output descriptor by its token name.
    """
    # Map of output-file token -> its exact CSV header line (no newline).
    headers = {
        'person' :
            'person_id,gender_concept_id,year_of_birth,month_of_birth,day_of_birth,time_of_birth,race_concept_id,ethnicity_concept_id,'
            'location_id,provider_id,care_site_id,person_source_value,gender_source_value,gender_source_concept_id,race_source_value,'
            'race_source_concept_id,ethnicity_source_value,ethnicity_source_concept_id',
        'observation':
            'observation_id,person_id,observation_concept_id,observation_date,observation_time,observation_type_concept_id,value_as_number,'
            'value_as_string,value_as_concept_id,qualifier_concept_id,unit_concept_id,provider_id,visit_occurrence_id,observation_source_value,'
            'observation_source_concept_id,unit_source_value,qualifier_source_value',
        'observation_period':
            'observation_period_id,person_id,observation_period_start_date,observation_period_end_date,period_type_concept_id',
        'specimen':
            'specimen_id,person_id,specimen_concept_id,specimen_type_concept_id,specimen_date,specimen_time,quantity,'
            'unit_concept_id,anatomic_site_concept_id,disease_status_concept_id,specimen_source_id,specimen_source_value,unit_source_value,'
            'anatomic_site_source_value,disease_status_source_value',
        'death':
            'person_id,death_date,death_type_concept_id,cause_concept_id,cause_source_value,cause_source_concept_id',
        'visit_occurrence':
            'visit_occurrence_id,person_id,visit_concept_id,visit_start_date,visit_start_time,visit_end_date,visit_end_time,'
            'visit_type_concept_id,provider_id,care_site_id,visit_source_value,visit_source_concept_id',
        'visit_cost':
            'visit_cost_id,visit_occurrence_id,currency_concept_id,paid_copay,paid_coinsurance,paid_toward_deductible,'
            'paid_by_payer,paid_by_coordination_benefits,total_out_of_pocket,total_paid,payer_plan_period_id',
        'condition_occurrence':
            'condition_occurrence_id,person_id,condition_concept_id,condition_start_date,condition_end_date,condition_type_concept_id,'
            'stop_reason,provider_id,visit_occurrence_id,condition_source_value,condition_source_concept_id',
        'procedure_occurrence':
            'procedure_occurrence_id,person_id,procedure_concept_id,procedure_date,procedure_type_concept_id,modifier_concept_id,'
            'quantity,provider_id,visit_occurrence_id,procedure_source_value,procedure_source_concept_id,qualifier_source_value',
        'procedure_cost':
            'procedure_cost_id,procedure_occurrence_id,currency_concept_id,paid_copay,paid_coinsurance,paid_toward_deductible,'
            'paid_by_payer,paid_by_coordination_benefits,total_out_of_pocket,total_paid,revenue_code_concept_id,payer_plan_period_id,revenue_code_source_value',
        'drug_exposure':
            'drug_exposure_id,person_id,drug_concept_id,drug_exposure_start_date,drug_exposure_end_date,drug_type_concept_id,'
            'stop_reason,refills,quantity,days_supply,sig,route_concept_id,effective_drug_dose,dose_unit_concept_id,'
            'lot_number,provider_id,visit_occurrence_id,drug_source_value,drug_source_concept_id,route_source_value,dose_unit_source_value',
        'drug_cost':
            'drug_cost_id,drug_exposure_id,currency_concept_id,paid_copay,paid_coinsurance,paid_toward_deductible,paid_by_payer,paid_by_coordination_of_benefits,'
            'total_out_of_pocket,total_paid,ingredient_cost,dispensing_fee,average_wholesale_price,payer_plan_period_id',
        'device_exposure':
            'device_exposure_id,person_id,device_concept_id,device_exposure_start_date,device_exposure_end_date,device_type_concept_id,'
            'unique_device_id,quantity,provider_id,visit_occurrence_id,device_source_value,device_source_concept_id',
        'device_cost':
            'device_cost_id,device_exposure_id,currency_concept_id,paid_copay,paid_coinsurance,paid_toward_deductible,'
            'paid_by_payer,paid_by_coordination_benefits,total_out_of_pocket,total_paid,payer_plan_period_id',
        'measurement_occurrence':
            'measurement_id,person_id,measurement_concept_id,measurement_date,measurement_time,measurement_type_concept_id,operator_concept_id,'
            'value_as_number,value_as_concept_id,unit_concept_id,range_low,range_high,provider_id,visit_occurrence_id,measurement_source_value,'
            'measurement_source_concept_id,unit_source_value,value_source_value',
        'location':
            'location_id,address_1,address_2,city,state,zip,county,location_source_value',
        'care_site':
            'care_site_id,care_site_name,place_of_service_concept_id,location_id,care_site_source_value,place_of_service_source_value',
        'provider':
            'provider_id,provider_name,NPI,DEA,specialty_concept_id,care_site_id,year_of_birth,gender_concept_id,provider_source_value,'
            'specialty_source_value,specialty_source_concept_id,gender_source_value,gender_source_concept_id',
        'payer_plan_period':
            'payer_plan_period_id,person_id,payer_plan_period_start_date,payer_plan_period_end_date,payer_source_value,'
            'plan_source_value,family_source_value',
    }
    # Write one header line per output descriptor; KeyError here means an
    # output file exists with no header defined above.
    for token in sorted(file_control.descriptor_list(which='output')):
        fd = file_control.get_Descriptor(token)
        fd.write(headers[token] + '\n')
        fd.increment_recs_written(1)
#---------------------------------
# Dead code removed
#---------------------------------
# A module-level triple-quoted string here used to hold a disabled debugging
# helper, dump_beneficiary_records(), which pretty-printed a beneficiary's
# carrier / inpatient / outpatient / prescription claim records.  The string
# literal was a no-op expression evaluated at import time; per best practice
# the dead code has been deleted.  Recover it from version control if it is
# ever needed again.
def process_beneficiary(bene):
    """Run the full per-beneficiary ETL pipeline and flush the outputs.

    The call order matters: claim data must be loaded and visits determined
    before the claim-processing functions look up visit ids, and person /
    payer-plan / location rows are written before the dependent records.
    Uses the module-level ``file_control`` and OMOP_CONSTANTS.
    """
    bene.LoadClaimData(file_control)
    write_person_record(bene)
    write_payer_plan_period_record(bene)
    write_location_record(bene)
    # Assign visit_occurrence ids before any claim record references them.
    determine_visits(bene)
    write_observation_period_records(bene)
    write_death_records(file_control.get_Descriptor('death'), bene,
                        death_type_concept_id=OMOP_CONSTANTS.DEATH_TYPE_PAYER_ENR_STATUS,
                        cause_source_concept_id=0)
    write_drug_records(bene)
    process_inpatient_records(bene)
    process_outpatient_records(bene)
    process_carrier_records(bene)
    # Keep output files current so a crash loses at most one beneficiary.
    file_control.flush_all()
#---------------------------------
# Dead code removed
#---------------------------------
# A module-level triple-quoted string here used to hold a disabled one-off
# analysis helper, dump_source_concept_codes(), with nested check_carrier_claims /
# check_inpatient_claims / check_outpatient_claims / check_prescription_drug
# functions that counted ICD-9 / HCPCS / NDC codes from hard-coded local input
# paths and wrote codes_1.txt / code_summary.txt.  The string literal was a
# no-op expression evaluated at import time; per best practice the dead code
# has been deleted.  Recover it from version control if it is ever needed
# again.
#---------------------------------
# start of the program
#---------------------------------
# Script entry point (Python 2): set up directories and lookup tables, then
# stream the beneficiary summary file, grouping consecutive rows by
# DESYNPUF_ID (input is assumed sorted by that column — TODO confirm) and
# running the full ETL for each beneficiary.
if __name__ == '__main__':
    # Ensure output/control directories exist before any file is opened.
    if not os.path.exists(BASE_OUTPUT_DIRECTORY): os.makedirs(BASE_OUTPUT_DIRECTORY)
    if not os.path.exists(BASE_ETL_CONTROL_DIRECTORY): os.makedirs(BASE_ETL_CONTROL_DIRECTORY)
    parser = argparse.ArgumentParser(description='Enter Sample Number')
    parser.add_argument('sample_number', type=int, default=1)
    args = parser.parse_args()
    current_sample_number = args.sample_number
    SAMPLE_RANGE = [current_sample_number]
    # Start each run with a fresh per-sample stats file.
    current_stats_filename = os.path.join(BASE_OUTPUT_DIRECTORY,'etl_stats.txt_{0}'.format(current_sample_number))
    if os.path.exists(current_stats_filename): os.unlink(current_stats_filename)
    log_stats('CMS_ETL starting')
    log_stats('BASE_SYNPUF_INPUT_DIRECTORY    =' + BASE_SYNPUF_INPUT_DIRECTORY)
    log_stats('BASE_OUTPUT_DIRECTORY          =' + BASE_OUTPUT_DIRECTORY)
    log_stats('BASE_ETL_CONTROL_DIRECTORY     =' + BASE_ETL_CONTROL_DIRECTORY)
    file_control = FileControl(BASE_SYNPUF_INPUT_DIRECTORY, BASE_OUTPUT_DIRECTORY, SYNPUF_DIR_FORMAT, current_sample_number)
    file_control.delete_all_output()
    print '-'*80
    print '-- all files present....'
    print '-'*80
    #Set up initial identifier counters
    table_ids = Table_ID_Values()
    # Resume id counters from a previous run if the control file exists.
    table_ids_filename = os.path.join(BASE_ETL_CONTROL_DIRECTORY, 'etl_synpuf_last_table_ids.txt')
    if os.path.exists(table_ids_filename):
        table_ids.Load(table_ids_filename, log_stats)
    # Build mappings between SynPUF codes and OMOP Vocabulary concept_ids
    build_maps()
    bene_dump_filename = os.path.join(BASE_OUTPUT_DIRECTORY,'beneficiary_dump_{0}.txt'.format(current_sample_number))
    omop_unmapped_code_file = os.path.join(BASE_ETL_CONTROL_DIRECTORY,'unmapped_code_log.txt')
    # Append-mode: unmapped codes accumulate across runs/samples.
    unmapped_log = open(omop_unmapped_code_file, 'a+')
    # Build the object to manage access to all the files
    write_header_records()
    with open(bene_dump_filename,'w') as fout:
        beneficiary_fd = file_control.get_Descriptor('beneficiary')
        log_stats('-'*80)
        log_stats('reading beneficiary file -> '+ beneficiary_fd.complete_pathname)
        log_stats('last_person_id starting value -> ' + str(table_ids.last_person_id))
        recs_in = 0
        rec = ''
        save_DESYNPUF_ID = ''
        unique_DESYNPUF_ID_count = 0
        bene = None
        try:
            with beneficiary_fd.open() as fin:
                # Skip header record
                rec = fin.readline()
                for rec in fin:
                    recs_in += 1
                    if recs_in % 10000 == 0: print 'beneficiary recs_in: ', recs_in
                    rec = rec.split(',')
                    DESYNPUF_ID = rec[BENEFICIARY_SUMMARY_RECORD.DESYNPUF_ID]
                    SP_STATE_CODE = rec[BENEFICIARY_SUMMARY_RECORD.SP_STATE_CODE]
                    BENE_COUNTY_CD = rec[BENEFICIARY_SUMMARY_RECORD.BENE_COUNTY_CD]
                    # count on this header record field being in every file
                    if '"DESYNPUF_ID"' in rec:
                        continue
                    # check for bene break: a new DESYNPUF_ID means the
                    # previous beneficiary's yearly rows are complete.
                    if DESYNPUF_ID != save_DESYNPUF_ID:
                        if not bene is None:
                            process_beneficiary(bene)
                        unique_DESYNPUF_ID_count += 1
                        save_DESYNPUF_ID = DESYNPUF_ID
                        bene = Beneficiary(DESYNPUF_ID, table_ids.last_person_id, SP_STATE_CODE, BENE_COUNTY_CD)
                        table_ids.last_person_id += 1
                    #accumulate for the current bene
                    bene.AddYearData(rec)
                # Flush the final beneficiary (no trailing id break occurs).
                if not bene is None:
                    process_beneficiary(bene)
        except BaseException:
            # Report where the failure happened, then re-raise unchanged.
            print '** ERROR reading beneficiary file, record number ', recs_in, '\n record-> ', rec
            raise
        beneficiary_fd.increment_recs_read(recs_in)
        log_stats('last_person_id ending value -> ' + str(table_ids.last_person_id))
        log_stats('Done: total records read ={0}, unique IDs={1}'.format(recs_in, unique_DESYNPUF_ID_count))
    file_control.close_all()
    #- save look up tables & last-used-ids
    persist_lookup_tables()
    table_ids.Save(table_ids_filename)
    log_stats('CMS_ETL done')
    log_stats('Input Records------')
    for token in sorted(file_control.descriptor_list(which='input')):
        fd = file_control.get_Descriptor(token)
        log_stats('\tFile: {0:50}, records_read={1:10}'.format(fd.token, fd.records_read))
    log_stats('Output Records------')
    for token in sorted(file_control.descriptor_list(which='output')):
        fd = file_control.get_Descriptor(token)
        # > 1 because every output already holds its header record.
        if fd.records_written > 1:
            log_stats('\tFile: {0:50}, records_written={1:10}'.format(fd.token, fd.records_written))
    print '** done **'
|
OHDSI/ETL-CMS
|
python_etl/CMS_SynPuf_ETL_CDM_v5.py
|
Python
|
apache-2.0
| 121,037
|
[
"VisIt"
] |
7fa21fe0e02cefbd4e780208dad17e82d4dc537e0ccbf391269822fcb1ccbcf1
|
paraview_plugin_version = '1.1.6'
# This is module to import. It provides VTKPythonAlgorithmBase, the base class
# for all python-based vtkAlgorithm subclasses in VTK and decorators used to
# 'register' the algorithm with ParaView along with information about UI.
from paraview.util.vtkAlgorithm import smproperty, smproxy
# Helpers:
from PVGeo import _helpers
# Classes to Decorate
from PVGeo.gmggroup import OMFReader
###############################################################################
@smproxy.reader(
    name="PVGeoOMFReader",
    label="PVGeo: Open Mining Format Project Reader",
    extensions=OMFReader.extensions,
    file_description=OMFReader.description,
)
class PVGeoOMFReader(OMFReader):
    """ParaView server-manager wrapper around :class:`PVGeo.gmggroup.OMFReader`.

    Adds no behavior of its own: the ``smproxy``/``smproperty`` decorators
    register the reader with ParaView and generate its UI/proxy XML, while
    every method delegates straight to the base reader.
    """
    def __init__(self):
        OMFReader.__init__(self)
    #### Seters and Geters ####
    # TODO: check this to make sure not time varying
    @smproperty.xml(
        _helpers.get_file_reader_xml(
            OMFReader.extensions, reader_description=OMFReader.description
        )
    )
    def AddFileName(self, filename):
        # Pure delegation; the decorator only exposes the file-name
        # property in the ParaView UI.
        OMFReader.AddFileName(self, filename)
    # Array selection API is typical with readers in VTK
    # This is intended to allow ability for users to choose which arrays to
    # load. To expose that in ParaView, simply use the
    # smproperty.dataarrayselection().
    # This method **must** return a `vtkDataArraySelection` instance.
    @smproperty.dataarrayselection(name="Project Data")
    def GetDataSelection(self):
        return OMFReader.GetDataSelection(self)
###############################################################################
# @smproxy.filter(name="PVGeoOMFExtractor", label="OMF Block Extractor")
# @smhint.xml('<ShowInMenu category="%s"/>' % 'PVGeo: OMF')
# @smproperty.input(name="MultiBlockInput", port_index=0)
# @smdomain.datatype(dataTypes=["vtkMultiBlockDataSet"], composite_data_supported=True)
# class PVGeoOMFExtractor(AlgorithmBase):
# def __init__(self):
# AlgorithmBase.__init__(self, nInputPorts=1, inputType='vtkMultiBlockDataSet',
# nOutputPorts=1, outputType='vtkPolyData')
# self.__block = 0
#
#
# #### Pipeline Methods ####
#
# # THIS IS CRUCIAL to preserve data type through filter
# def RequestDataObject(self, request, inInfo, outInfo):
# input = self.GetInputData(inInfo, 0, 0)
# obj = input.GetBlock(self.__block)
# self.OutputType = obj.GetClassName()
# self.FillOutputPortInformation(0, outInfo.GetInformationObject(0))
# outInfo.GetInformationObject(0).Set(vtk.vtkDataObject.DATA_OBJECT(), obj)
# return 1
#
#
# def RequestData(self, request, inInfo, outInfo):
# # Now extract the multiblock data set
# self.RequestDataObject(request, inInfo, outInfo)
# input = self.GetInputData(inInfo, 0, 0)
# output = self.GetOutputData(outInfo, 0)
# obj = input.GetBlock(self.__block)
# output.ShallowCopy(obj)
# print(outInfo)
# return 1
#
# @smproperty.xml('''
# <IntVectorProperty
# command="SetBlock"
# name="BlockIndices"
# label="Block Indices"
# animateable="1"
# repeat_command="0" >
# <CompositeTreeDomain
# mode="all"
# name="tree">
# <RequiredProperties>
# <Property function="Input"
# name="MultiBlockInput" />
# </RequiredProperties>
# </CompositeTreeDomain>
# <Documentation></Documentation>
# </IntVectorProperty>''')
# def SetBlock(self, block):
# if self.__block != block:
# self.__block = block
# self.Modified()
|
banesullivan/ParaViewGeophysics
|
PVPlugins/PVGeo_OMF.py
|
Python
|
bsd-3-clause
| 3,699
|
[
"ParaView",
"VTK"
] |
95d4ed47d7e7ea03df6c4b200d66ca7e47ae0ffc2de2f26662c486bb0309a693
|
""" :mod: SRM2Storage
=================
.. module: python
:synopsis: SRM v2 interface to StorageElement
"""
# # imports
import os
import re
import time
import errno
from stat import S_ISREG, S_ISDIR, S_IMODE, ST_MODE, ST_SIZE
# # from DIRAC
from DIRAC import gLogger, gConfig
from DIRAC.Core.Utilities import DErrno
from DIRAC.Core.Utilities.ReturnValues import S_OK, S_ERROR
from DIRAC.Resources.Storage.Utilities import checkArgumentFormat
from DIRAC.Resources.Storage.StorageBase import StorageBase
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOForGroup
from DIRAC.Core.Utilities.Subprocess import pythonCall
from DIRAC.Core.Utilities.List import breakListIntoChunks
from DIRAC.Core.Utilities.File import getSize
from DIRAC.Core.Utilities.Decorators import deprecated
# # RCSID
__RCSID__ = "$Id$"
@deprecated('Replaced by gfal2 based plugins', onlyOnce = True)
class SRM2Storage( StorageBase ):
""" .. class:: SRM2Storage
SRM v2 interface to StorageElement using lcg_util and gfal
"""
_INPUT_PROTOCOLS = ['file', 'srm']
_OUTPUT_PROTOCOLS = ['file', 'root', 'dcap', 'gsidcap', 'rfio', 'srm', 'gsiftp']
  def __init__( self, storageName, parameters ):
    """ c'tor
    Reads all SRM/gfal tuning parameters from the configuration service and
    initialises placeholders for the lazily-imported lcg_util/gfal externals.
    :param self: self reference
    :param str storageName: SE name
    :param dict parameters: dictionary of protocol parameters
    """
    StorageBase.__init__( self, storageName, parameters )
    # # space token advertised by this SE
    self.spaceToken = self.protocolParameters['SpaceToken']
    self.log = gLogger.getSubLogger( "SRM2Storage", True )
    self.isok = True
    # # placeholder for gfal reference
    self.gfal = None
    # # placeholder for lcg_util reference
    self.lcg_util = None
    # # save c'tor params
    self.pluginName = 'SRM2'
    # # stage limit - 12h
    self.stageTimeout = gConfig.getValue( '/Resources/StorageElements/StageTimeout', 12 * 60 * 60 )
    # # 1 file timeout
    self.fileTimeout = gConfig.getValue( '/Resources/StorageElements/FileTimeout', 30 )
    # # nb of surls per gfal call
    self.filesPerCall = gConfig.getValue( '/Resources/StorageElements/FilesPerCall', 20 )
    # # gfal timeout
    self.gfalTimeout = gConfig.getValue( "/Resources/StorageElements/GFAL_Timeout", 100 )
    # # gfal long timeout
    self.gfalLongTimeOut = gConfig.getValue( "/Resources/StorageElements/GFAL_LongTimeout", 1200 )
    # # gfal retry on errno.ECONN
    self.gfalRetry = gConfig.getValue( "/Resources/StorageElements/GFAL_Retry", 3 )
    # # should busy files be considered to exist
    self.busyFilesExist = gConfig.getValue( "/Resources/StorageElements/SRMBusyFilesExist", False )
    # # set checksum type, by default this is 0 (GFAL_CKSM_NONE)
    checksumType = gConfig.getValue( "/Resources/StorageElements/ChecksumType", '' )
    # enum gfal_cksm_type, all in lcg_util
    # GFAL_CKSM_NONE = 0,
    # GFAL_CKSM_CRC32,
    # GFAL_CKSM_ADLER32,
    # GFAL_CKSM_MD5,
    # GFAL_CKSM_SHA1
    # GFAL_CKSM_NULL = 0
    # # map the configured name to the gfal enum value; unknown names fall back to 0
    self.checksumTypes = { "CRC32" : 1, "ADLER32" : 2,
                           "MD5" : 3, "SHA1" : 4, "NONE" : 0, "NULL" : 0 }
    self.checksumType = self.checksumTypes.get( checksumType.upper(), 0 )
    if self.checksumType:
      gLogger.debug( "SRM2Storage: will use %s checksum check" % self.checksumType )
    elif checksumType:
      # a name was configured but is not in the map above
      gLogger.warn( "SRM2Storage: unknown checksum, check disabled", checksumType )
    else:
      self.log.debug( "SRM2Storage: will use no checksum" )
    # setting some variables for use with lcg_utils
    self.nobdii = 1
    self.defaulttype = 2
    # # VO resolved from the proxy group (stays None when no proxy/group available)
    self.voName = None
    ret = getProxyInfo( disableVOMS = True )
    if ret['OK'] and 'group' in ret['Value']:
      self.voName = getVOForGroup( ret['Value']['group'] )
    # enable lcg-utils debugging for debug level DEBUG
    lcgdebuglevel = 0
    dlevel = self.log.getLevel()
    if dlevel == 'DEBUG':
      lcgdebuglevel = 999
    self.verbose = lcgdebuglevel
    self.conf_file = 'ignored'
    self.insecure = 0
    self.defaultLocalProtocols = gConfig.getValue( '/Resources/StorageElements/DefaultProtocols', [] )
    # # files larger than this are transferred with several streams (see __putFile)
    self.MAX_SINGLE_STREAM_SIZE = 1024 * 1024 * 10 # 10 MB ???
    # # assumed worst-case throughput, used to compute transfer timeouts
    self.MIN_BANDWIDTH = 0.5 * ( 1024 * 1024 ) # 0.5 MB/s ???
  def __importExternals( self ):
    """ import lcg_util and gfalthr or gfal
    Lazily imports the external bindings and caches them on the instance
    (self.lcg_util / self.gfal); subsequent calls return immediately.
    :param self: self reference
    :return: S_OK() or S_ERROR( DErrno.EIMPERR, ... ) when an import fails
    """
    if ( self.lcg_util ) and ( self.gfal ):
      # already imported and cached
      return S_OK()
    # # get lcg_util
    try:
      import lcg_util
      self.log.debug( "Using lcg_util version %s from %s" % ( lcg_util.lcg_util_version(),
                                                              lcg_util.__file__ ) )
    except ImportError, error:
      gLogger.exception( "__importExternals: Failed to import lcg_util", "", error )
      return S_ERROR( DErrno.EIMPERR, error )
    # # and gfalthr
    try:
      import gfalthr as gfal
      self.log.debug( 'Using gfalthr version %s from %s' % ( gfal.gfal_version(),
                                                             gfal.__file__ ) )
    except ImportError, error:
      # missing gfalthr (threaded flavour) is not fatal: fall back to plain gfal
      self.log.warn( "__importExternals: Failed to import gfalthr: %s." % error )
      # # so gfal maybe?
      try:
        import gfal
        self.log.debug( "Using gfal version %s from %s" % ( gfal.gfal_version(),
                                                            gfal.__file__ ) )
      except ImportError, error:
        gLogger.exception( "__importExternals: Failed to import gfal", "", error )
        return S_ERROR( DErrno.EIMPERR, error )
    self.lcg_util = lcg_util
    self.gfal = gfal
    return S_OK()
################################################################################
#
# The methods below are URL manipulation methods
#
################################################################################
def __convertRandomSRMOutputIntoAFullURL( self, srmPath ):
""" When calling gfal operation, srm sometimes returns as a surl just the physical path on the storage
without the host, port and else. Sometimes it is the full surl. Sometimes it doesn't have the WSUrl.
So we correct all this and make sure that we return to the caller a full surl.
/my/base/path/the/lfn.raw -> srm://host:port/srm/v2/server?SFN=/my/base/path/the/lfn.raw
"""
from DIRAC.Core.Utilities.Pfn import pfnunparse, pfnparse
# if self.isURL( srmPath )['Value']:
if ':' in srmPath:
dic = pfnparse( srmPath )['Value']
dic['WSUrl'] = self.protocolParameters['WSUrl']
srmPath = pfnunparse( dic )['Value']
return S_OK( srmPath )
urlDict = dict( self.protocolParameters )
urlDict['Path'] = ''
unp = pfnunparse( urlDict )['Value']
unp = os.path.join( unp, srmPath.lstrip( '/' ) )
return S_OK( unp )
#############################################################
#
# These are the methods for directory manipulation
#
######################################################################
#
# This has to be updated once the new gfal_makedir() becomes available
# TODO: isn't it there? when somebody made above comment?
#
def createDirectory( self, path ):
""" mkdir -p path on storage
:param self: self reference
:param str path:
"""
urls = checkArgumentFormat( path )
if not urls['OK']:
return urls
urls = urls['Value']
successful = {}
failed = {}
self.log.debug( "createDirectory: Attempting to create %s directories." % len( urls ) )
for url in urls:
strippedUrl = url.rstrip( '/' )
res = self.__makeDirs( strippedUrl )
if res['OK']:
self.log.debug( "createDirectory: Successfully created directory on storage: %s" % url )
successful[url] = True
else:
self.log.error( "createDirectory: Failed to create directory on storage.",
"\n%s: \n%s" % ( url, res['Message'] ) )
failed[url] = res['Message']
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def __makeDir( self, path ):
""" mkdir path in a weird way
:param self: self reference
:param str path:
"""
srcFile = os.path.join( os.environ.get( 'TMPDIR', os.environ.get( 'TMP', '/tmp' ) ), 'dirac_directory' )
if not os.path.exists( srcFile ):
dfile = open( srcFile, 'w' )
dfile.write( " " )
dfile.close()
destFile = os.path.join( path, 'dirac_directory.%s' % time.time() )
res = self.__putFile( srcFile, destFile, 0, checkExists = False )
if res['OK']:
self.__executeOperation( destFile, 'removeFile' )
return res
def __makeDirs( self, path ):
""" black magic contained within...
:param self: self reference
:param str path: dir name
"""
res = self.__executeOperation( path, 'exists' )
if not res['OK']:
return res
if res['Value']:
return S_OK()
# directory doesn't exist, create it
dirName = os.path.dirname( path )
res = self.__executeOperation( dirName, 'exists' )
if not res['OK']:
return res
if not res['Value']:
res = self.__makeDirs( dirName )
if not res['OK']:
return res
return self.__makeDir( path )
################################################################################
#
# The methods below use the new generic methods for executing operations
#
################################################################################
def removeFile( self, path ):
""" rm path on storage
:param self: self reference
:param str path: file path
"""
log = self.log.getSubLogger( 'removeFile' )
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "removeFile: Performing the removal of %s file(s)" % len( urls ) )
resDict = self.__gfaldeletesurls_wrapper( urls )
if not resDict["OK"]:
self.log.error( "Failed removeFile", "%s" % resDict["Message"] )
return resDict
resDict = resDict['Value']
failed = resDict['Failed']
allResults = resDict['AllResults']
successful = {}
for urlDict in allResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
self.log.debug( "removeFile: Successfully removed file: %s" % pathSURL )
successful[pathSURL] = True
elif urlDict['status'] == 2:
# This is the case where the file doesn't exist.
self.log.debug( "removeFile: File did not exist, successfully removed: %s" % pathSURL )
successful[pathSURL] = True
else:
errStr = "removeFile: Failed to remove file."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
  def getTransportURL( self, path, protocols = False ):
    """ obtain the tURLs for the supplied path and protocols
    :param self: self reference
    :param str path: path on storage
    :param mixed protocols: protocols to use -- False (use configured
                            defaults), a single protocol string, or a list
    :return: S_OK( { 'Successful' : { surl : turl }, 'Failed' : { surl : msg } } )
    """
    res = checkArgumentFormat( path )
    if not res['OK']:
      return res
    urls = res['Value']
    # normalise the requested protocols into a list
    if not protocols:
      protocols = self.__getProtocols()
      if not protocols['OK']:
        return protocols
      listProtocols = protocols['Value']
    elif isinstance( protocols, basestring ):
      listProtocols = [protocols]
    elif isinstance( protocols, list ):
      listProtocols = protocols
    else:
      return S_ERROR( errno.EPROTO, "getTransportURL: Must supply desired protocols to this plug-in." )
    if self.protocolParameters['Protocol'] in listProtocols:
      # the SE's native protocol is acceptable: the SURL itself serves as tURL,
      # no need to contact the SRM service
      successful = {}
      failed = {}
      for url in urls:
        if self.isURL( url )['Value']:
          successful[url] = url
        else:
          failed[url] = 'getTransportURL: Failed to obtain turls.'
      return S_OK( {'Successful' : successful, 'Failed' : failed} )
    # NOTE(review): `self.se` is not assigned anywhere in the visible class --
    # presumably provided by StorageBase; confirm, otherwise this raises AttributeError.
    if not self.se.status()['Read']:
      return S_ERROR( "SRM2Storage.getTransportURL: Read access not currently permitted." )
    # Here we must go out to the SRM service
    self.log.debug( "getTransportURL: Obtaining tURLs for %s file(s)." % len( urls ) )
    resDict = self.__gfalturlsfromsurls_wrapper( urls, listProtocols )
    if not resDict["OK"]:
      self.log.error( "Failed getTransportURL", "%s" % resDict["Message"] )
      return resDict
    resDict = resDict['Value']
    failed = resDict['Failed']
    allResults = resDict['AllResults']
    successful = {}
    # classify each per-surl result by its gfal status code
    for urlDict in allResults:
      if urlDict.get( 'surl' ):
        pathSURL = urlDict['surl']
        if urlDict['status'] == 0:
          self.log.debug( "getTransportURL: Obtained tURL for file. %s" % pathSURL )
          successful[pathSURL] = urlDict['turl']
        elif urlDict['status'] == 2:
          # status 2: file not found on the storage
          errMessage = "File does not exist"
          self.log.error( errMessage, pathSURL )
          failed[pathSURL] = errMessage
        else:
          errStr = "getTransportURL: Failed to obtain turls."
          errMessage = urlDict['ErrorMessage']
          self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
          failed[pathSURL] = "%s %s" % ( errStr, errMessage )
    return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def prestageFile( self, path, lifetime = 86400 ):
""" Issue prestage request for file
:param self: self reference
:param str path: PFN path
:param int lifetime: prestage lifetime in seconds (default 24h)
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "prestageFile: Attempting to issue stage requests for %s file(s)." % len( urls ) )
resDict = self.__gfal_prestage_wrapper( urls, lifetime )
if not resDict["OK"]:
self.log.error( "Failed prestageFile", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
allResults = resDict['AllResults']
successful = {}
for urlDict in allResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
self.log.debug( "prestageFile: Issued stage request for file %s." % pathSURL )
successful[pathSURL] = urlDict['SRMReqID']
elif urlDict['status'] == 1:
self.log.debug( "prestageFile: File found to be already staged.", pathSURL )
successful[pathSURL] = urlDict['SRMReqID']
# It can be 11 or 22 depending on the srm-ifce version...
elif urlDict['status'] in ( 11, 22 ):
self.log.debug( "prestageFile: Stage request for file %s queued.", pathSURL )
successful[pathSURL] = urlDict['SRMReqID']
elif urlDict['status'] == 2:
errMessage = "prestageFile: File does not exist."
self.log.error( errMessage, pathSURL )
failed[pathSURL] = errMessage
else:
errStr = "prestageFile: Failed issue stage request."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( errMessage, pathSURL ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def prestageFileStatus( self, path ):
""" Monitor prestage request for files
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "prestageFileStatus: Attempting to get status "
"of stage requests for %s file(s)." % len( urls ) )
resDict = self.__gfal_prestagestatus_wrapper( urls )
if not resDict["OK"]:
self.log.error( "Failed prestageFileStatus", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
allResults = resDict['AllResults']
successful = {}
for urlDict in allResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 1:
self.log.debug( "SRM2Storage.prestageFileStatus: File found to be staged %s." % pathSURL )
successful[pathSURL] = True
elif urlDict['status'] == 0:
self.log.debug( "SRM2Storage.prestageFileStatus: File not staged %s." % pathSURL )
successful[pathSURL] = False
elif urlDict['status'] == 2:
errMessage = "SRM2Storage.prestageFileStatus: File does not exist."
self.log.error( errMessage, pathSURL )
failed[pathSURL] = errMessage
else:
errStr = "SRM2Storage.prestageFileStatus: Failed get prestage status."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( errMessage, pathSURL ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def getFileMetadata( self, path ):
""" Get metadata associated to the file
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
failed = {}
self.log.debug( "getFileMetadata: Obtaining metadata for %s file(s)." % len( urls ) )
resDict = self.__gfal_ls_wrapper( urls, 0 )
if not resDict["OK"]:
self.log.error( "Failed getFileMetadata:", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed.update( resDict['Failed'] )
listOfResults = resDict['AllResults']
successful = {}
for urlDict in listOfResults:
if urlDict.get( 'surl' ):
# Get back the input value for that surl
path = urlDict['surl']
if urlDict['status'] == 0:
statDict = self.__parse_file_metadata( urlDict )
if statDict['File']:
successful[path] = statDict
else:
errStr = "getFileMetadata: Supplied path is not a file."
self.log.error( errStr, path )
failed[path] = errStr
elif urlDict['status'] == 2:
errMessage = "getFileMetadata: File does not exist."
self.log.error( errMessage, path )
failed[path] = errMessage
else:
errStr = "SRM2Storage.getFileMetadata: Failed to get file metadata."
errMessage = "%s: %s" % ( path, urlDict['ErrorMessage'] )
self.log.error( errStr, errMessage )
failed[path] = "%s %s" % ( errStr, urlDict['ErrorMessage'] )
else:
errStr = "getFileMetadata: Returned element does not contain surl."
self.log.fatal( errStr, self.name )
return S_ERROR( errno.ENOMSG, errStr )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def isFile( self, path ):
"""Check if the given path exists and it is a file
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "isFile: Checking whether %s path(s) are file(s)." % len( urls ) )
resDict = self.__gfal_ls_wrapper( urls, 0 )
if not resDict["OK"]:
self.log.error( "Failed isFile:", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
listOfResults = resDict['AllResults']
successful = {}
for urlDict in listOfResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
statDict = self.__parse_file_metadata( urlDict )
if statDict['File']:
successful[pathSURL] = True
else:
self.log.debug( "isFile: Path is not a file: %s" % pathSURL )
successful[pathSURL] = False
elif urlDict['status'] == 2:
errMessage = "isFile: File does not exist."
self.log.error( errMessage, pathSURL )
failed[pathSURL] = errMessage
else:
errStr = "isFile: Failed to get file metadata."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
else:
errStr = "isFile: Returned element does not contain surl."
self.log.fatal( errStr, self.name )
return S_ERROR( errno.ENOMSG, errStr )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def pinFile( self, path, lifetime = 86400 ):
""" Pin a file with a given lifetime
:param self: self reference
:param str path: PFN path
:param int lifetime: pin lifetime in seconds (default 24h)
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "pinFile: Attempting to pin %s file(s)." % len( urls ) )
resDict = self.__gfal_pin_wrapper( urls, lifetime )
if not resDict["OK"]:
self.log.error( "Failed pinFile:", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
allResults = resDict['AllResults']
successful = {}
for urlDict in allResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
self.log.debug( "pinFile: Issued pin request for file %s." % pathSURL )
successful[pathSURL] = urlDict['SRMReqID']
elif urlDict['status'] == 2:
errMessage = "pinFile: File does not exist."
self.log.error( errMessage, pathSURL )
failed[pathSURL] = errMessage
else:
errStr = "pinFile: Failed issue pin request."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( errMessage, pathSURL ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def releaseFile( self, path ):
""" Release a pinned file
:param self: self reference
:param str path: PFN path
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "releaseFile: Attempting to release %s file(s)." % len( urls ) )
resDict = self.__gfal_release_wrapper( urls )
if not resDict["OK"]:
self.log.error( "Failed releaseFile:", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
allResults = resDict['AllResults']
successful = {}
for urlDict in allResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
self.log.debug( "Failed releaseFile:", "Issued release request for file %s." % pathSURL )
successful[pathSURL] = urlDict['SRMReqID']
elif urlDict['status'] == 2:
errMessage = "releaseFile: File does not exist."
self.log.error( errMessage, pathSURL )
failed[pathSURL] = errMessage
else:
errStr = "releaseFile: Failed issue release request."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( errMessage, pathSURL ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def exists( self, path ):
""" Check if the given path exists. """
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "SRM2Storage.exists: Checking the existance of %s path(s)" % len( urls ) )
resDict = self.__gfal_ls_wrapper( urls, 0 )
if not resDict["OK"]:
self.log.error( "Failed exists:", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
listOfResults = resDict['AllResults']
successful = {}
for urlDict in listOfResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
self.log.debug( "SRM2Storage.exists: Path exists: %s" % pathSURL )
successful[pathSURL] = True
elif urlDict['status'] in ( 16, 22 ) and self.busyFilesExist:
self.log.debug( "SRM2Storage.exists: Path exists, file busy (e.g., stage-out): %s" % pathSURL )
successful[pathSURL] = True
elif urlDict['status'] == 2:
self.log.debug( "SRM2Storage.exists: Path does not exist: %s" % pathSURL )
successful[pathSURL] = False
else:
errStr = "SRM2Storage.exists: Failed to get path metadata."
errMessage = urlDict['ErrorMessage']
self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
else:
errStr = "SRM2Storage.exists: Returned element does not contain surl."
self.log.fatal( errStr, self.name )
return S_ERROR( errno.ENOMSG, errStr )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def getFileSize( self, path ):
"""Get the physical size of the given file
"""
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
self.log.debug( "SRM2Storage.getFileSize: Obtaining the size of %s file(s)." % len( urls ) )
resDict = self.__gfal_ls_wrapper( urls, 0 )
if not resDict["OK"]:
self.log.error( "Failed getFileSize:", "%s" % resDict["Message"] )
return resDict
resDict = resDict["Value"]
failed = resDict['Failed']
listOfResults = resDict['AllResults']
successful = {}
for urlDict in listOfResults:
if urlDict.get( 'surl' ):
pathSURL = urlDict['surl']
if urlDict['status'] == 0:
statDict = self.__parse_file_metadata( urlDict )
if statDict['File']:
successful[pathSURL] = statDict['Size']
else:
errStr = "SRM2Storage.getFileSize: Supplied path is not a file."
self.log.verbose( errStr, pathSURL )
failed[pathSURL] = errStr
elif urlDict['status'] == 2:
errMessage = "SRM2Storage.getFileSize: File does not exist."
self.log.verbose( errMessage, pathSURL )
failed[pathSURL] = errMessage
else:
errStr = "SRM2Storage.getFileSize: Failed to get file metadata."
errMessage = urlDict['ErrorMessage']
self.log.verbose( errStr, "%s: %s" % ( pathSURL, errMessage ) )
failed[pathSURL] = "%s %s" % ( errStr, errMessage )
else:
errStr = "SRM2Storage.getFileSize: Returned element does not contain surl."
self.log.error( errStr, self.name )
return S_ERROR( errno.ENOMSG, errStr )
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def putFile( self, path, sourceSize = 0 ):
res = checkArgumentFormat( path )
if not res['OK']:
return res
urls = res['Value']
failed = {}
successful = {}
for dest_url, src_file in urls.items():
# Create destination directory
res = self.__executeOperation( os.path.dirname( dest_url ), 'createDirectory' )
if not res['OK']:
failed[dest_url] = res['Message']
else:
res = self.__putFile( src_file, dest_url, sourceSize )
if res['OK']:
successful[dest_url] = res['Value']
else:
failed[dest_url] = res['Message']
return S_OK( { 'Failed' : failed, 'Successful' : successful } )
  def __putFile( self, src_file, dest_url, sourceSize, checkExists = True ):
    """ put :src_file: to :dest_url:
    :param self: self reference
    :param str src_file: file path in local fs, or an srm:// SURL for
                         SE-to-SE replication
    :param str dest_url: destination url on storage
    :param int sourceSize: :src_file: size in B (mandatory for srm sources)
    :param bool checkExists: when True, remove a pre-existing destination first
    :return: S_OK( transferred size ) or S_ERROR
    """
    if checkExists:
      # Pre-transfer check
      res = self.__executeOperation( dest_url, 'exists' )
      if not res['OK']:
        self.log.debug( "__putFile: Failed to find pre-existance of destination file." )
        return res
      if res['Value']:
        # destination already present: best-effort removal before the copy
        res = self.__executeOperation( dest_url, 'removeFile' )
        if not res['OK']:
          self.log.debug( "__putFile: Failed to remove remote file %s." % dest_url )
        else:
          self.log.debug( "__putFile: Removed remote file %s." % dest_url )
    dsttype = self.defaulttype
    src_spacetokendesc = ''
    dest_spacetokendesc = self.spaceToken
    if re.search( 'srm:', src_file ):
      # third-party replication: the source is itself an SRM SURL and its
      # size cannot be determined locally, so the caller must supply it
      src_url = src_file
      srctype = 2
      if not sourceSize:
        return S_ERROR( errno.EINVAL, "__putFile: For file replication the source file size must be provided." )
    else:
      # upload from the local file system
      if not os.path.exists( src_file ):
        errStr = "__putFile: The source local file does not exist."
        self.log.error( errStr, src_file )
        return S_ERROR( errno.ENOENT, errStr )
      sourceSize = getSize( src_file )
      if sourceSize == -1:
        errStr = "__putFile: Failed to get file size."
        self.log.error( errStr, src_file )
        return S_ERROR( DErrno.EFILESIZE, errStr )
      src_url = 'file:%s' % src_file
      srctype = 0
      if sourceSize == 0:
        errStr = "__putFile: Source file is zero size."
        self.log.error( errStr, src_file )
        return S_ERROR( DErrno.EFILESIZE, errStr )
    # timeout scales with size assuming at least MIN_BANDWIDTH throughput
    timeout = int( sourceSize / self.MIN_BANDWIDTH + 300 )
    # several streams only pay off above MAX_SINGLE_STREAM_SIZE
    if sourceSize > self.MAX_SINGLE_STREAM_SIZE:
      nbstreams = 4
    else:
      nbstreams = 1
    self.log.info( "__putFile: Executing transfer of %s to %s using %s streams" % ( src_url, dest_url, nbstreams ) )
    # run the copy in a child process (pythonCall) so a hung external call
    # cannot block this one beyond timeout + 10s
    res = pythonCall( ( timeout + 10 ), self.__lcg_cp_wrapper, src_url, dest_url,
                      srctype, dsttype, nbstreams, timeout, src_spacetokendesc, dest_spacetokendesc )
    if not res['OK']:
      # Remove the failed replica, just in case
      result = self.__executeOperation( dest_url, 'removeFile' )
      if result['OK']:
        self.log.debug( "__putFile: Removed remote file remnant %s." % dest_url )
      else:
        self.log.debug( "__putFile: Unable to remove remote file remnant %s." % dest_url )
      return res
    # unwrap the inner return value produced inside the child process
    res = res['Value']
    if not res['OK']: # pylint: disable=invalid-sequence-index
      # Remove the failed replica, just in case
      result = self.__executeOperation( dest_url, 'removeFile' )
      if result['OK']:
        self.log.debug( "__putFile: Removed remote file remnant %s." % dest_url )
      else:
        self.log.debug( "__putFile: Unable to remove remote file remnant %s." % dest_url )
      return res
    errCode, errStr = res['Value'] # pylint: disable=invalid-sequence-index
    if errCode == 0:
      self.log.info( '__putFile: Successfully put file to storage.' )
      # # checksum check? return!
      if self.checksumType:
        return S_OK( sourceSize )
      # # else compare sizes
      res = self.__executeOperation( dest_url, 'getFileSize' )
      if res['OK']:
        destinationSize = res['Value']
        if sourceSize == destinationSize :
          self.log.debug( "__putFile: Post transfer check successful." )
          return S_OK( destinationSize )
      errorMessage = "__putFile: Source and destination file sizes do not match."
      errObj = S_ERROR( DErrno.EFILESIZE, errorMessage )
      self.log.error( errorMessage, src_url )
    else:
      errorMessage = "__putFile: Failed to put file to storage."
      errObj = S_ERROR( errCode, errorMessage )
      if errCode > 0:
        # positive codes are errno values: append the human-readable text
        errStr = "%s %s" % ( errStr, os.strerror( errCode ) )
      self.log.error( errorMessage, errStr )
    # failure path: remove whatever partial replica may have been left behind
    res = self.__executeOperation( dest_url, 'removeFile' )
    if res['OK']:
      self.log.debug( "__putFile: Removed remote file remnant %s." % dest_url )
    else:
      self.log.debug( "__putFile: Unable to remove remote file remnant %s." % dest_url )
    return errObj
def __lcg_cp_wrapper( self, src_url, dest_url, srctype, dsttype, nbstreams,
timeout, src_spacetokendesc, dest_spacetokendesc ):
""" lcg_util.lcg_cp wrapper
:param self: self reference
:param str src_url: source SURL
:param str dest_url: destination SURL
:param srctype: source SE type
:param dsttype: destination SE type
:param int nbstreams: nb of streams used for trasnfer
:param int timeout: timeout in seconds
:param str src_spacetoken: source space token
:param str dest_spacetoken: destination space token
"""
try:
errCode, errStr = self.lcg_util.lcg_cp4( src_url,
dest_url,
self.defaulttype,
srctype,
dsttype,
self.nobdii,
self.voName,
nbstreams,
self.conf_file,
self.insecure,
self.verbose,
timeout,
src_spacetokendesc,
dest_spacetokendesc,
self.checksumType )
if not isinstance( errCode, int ):
self.log.error( "__lcg_cp_wrapper: Returned errCode was not an integer",
"%s %s" % ( errCode, type( errCode ) ) )
if isinstance( errCode, list ):
msg = []
for err in errCode:
msg.append( '%s of type %s' % ( err, type( err ) ) )
self.log.error( "__lcg_cp_wrapper: Returned errCode was List:\n" , "\n".join( msg ) )
return S_ERROR( DErrno.EGFAL, "__lcg_cp_wrapper: Returned errCode was not an integer %s" % msg )
if not isinstance( errStr, basestring ):
self.log.error( "__lcg_cp_wrapper: Returned errStr was not a string",
"%s %s" % ( errCode, type( errStr ) ) )
return S_ERROR( DErrno.EGFAL, "__lcg_cp_wrapper: Returned errStr was not a string" )
return S_OK( ( errCode, errStr ) )
except Exception, error:
self.log.exception( "__lcg_cp_wrapper", "", error )
return S_ERROR( DErrno.EGFAL, "__lcg_cp_wrapper:Exception while attempting file upload %s" % error )
def getFile( self, path, localPath = False ):
  """ make a local copy of a storage :path:

  :param self: self reference
  :param str path: path on storage
  :param mixed localPath: destination directory; os.getcwd() when not specified
  :return: S_OK( { 'Failed' : ..., 'Successful' : { url : size } } )
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  failed = {}
  successful = {}
  # copy each SURL into the chosen local directory, keeping its base name
  for src_url in res['Value']:
    baseName = os.path.basename( src_url )
    destDir = localPath if localPath else os.getcwd()
    res = self.__getFile( src_url, "%s/%s" % ( destDir, baseName ) )
    if not res['OK']:
      failed[src_url] = res['Message']
    else:
      successful[src_url] = res['Value']
  return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def __getFile( self, src_url, dest_file ):
  """ do a real copy of storage file :src_url: to local fs under :dest_file:

  :param self: self reference
  :param str src_url: SE url to cp
  :param str dest_file: local fs path
  :return: S_OK( localSize ) on success, S_ERROR otherwise
  """
  # make sure the destination directory exists and the target file does not
  if not os.path.exists( os.path.dirname( dest_file ) ):
    os.makedirs( os.path.dirname( dest_file ) )
  if os.path.exists( dest_file ):
    self.log.debug( "__getFile: Local file already exists %s. Removing..." % dest_file )
    os.remove( dest_file )
  # source is the SE (with its space token); destination is a plain local
  # file, expressed as type 0 with no space token
  srctype = self.defaulttype
  src_spacetokendesc = self.spaceToken
  dsttype = 0
  dest_spacetokendesc = ''
  dest_url = 'file:%s' % dest_file
  # the remote size is needed both for the timeout heuristic and for the
  # post-transfer consistency check
  res = self.__executeOperation( src_url, 'getFileSize' )
  if not res['OK']:
    return res
  remoteSize = res['Value']
  # timeout scales with the expected transfer time at minimal bandwidth
  timeout = int( remoteSize / self.MIN_BANDWIDTH * 4 + 300 )
  nbstreams = 1
  self.log.info( "__getFile: Using %d streams" % nbstreams )
  self.log.info( "__getFile: Executing transfer of %s to %s" % ( src_url, dest_url ) )
  # run the transfer in a subprocess with a slightly larger overall timeout
  res = pythonCall( ( timeout + 10 ), self.__lcg_cp_wrapper, src_url, dest_url, srctype, dsttype,
                    nbstreams, timeout, src_spacetokendesc, dest_spacetokendesc )
  if not res['OK']:
    return res
  res = res['Value']
  if not res['OK']: # pylint:disable=invalid-sequence-index
    return res
  errCode, errStr = res['Value'] # pylint: disable=invalid-sequence-index
  if errCode == 0:
    self.log.debug( '__getFile: Got a file from storage.' )
    # post-transfer check: local size must match the remote one
    localSize = getSize( dest_file )
    if localSize == remoteSize:
      self.log.debug( "__getFile: Post transfer check successful." )
      return S_OK( localSize )
    errorMessage = "__getFile: Source and destination file sizes do not match."
    self.log.error( errorMessage, src_url )
  else:
    errorMessage = "__getFile: Failed to get file from storage."
    if errCode > 0:
      # positive error codes are errno values — append the system message
      errStr = "%s %s" % ( errStr, os.strerror( errCode ) )
    self.log.error( errorMessage, errStr )
  # remove any partial or mismatched local copy before reporting failure
  if os.path.exists( dest_file ):
    self.log.debug( "__getFile: Removing local file %s." % dest_file )
    os.remove( dest_file )
  return S_ERROR( errorMessage )
def __executeOperation( self, url, method ):
  """ executes the requested :method: with the supplied url

  Flattens the usual bulk { 'Failed' : ..., 'Successful' : ... } result of
  a single-url call into a plain S_OK / S_ERROR.

  :param self: self reference
  :param str url: SE url
  :param str method: fcn name
  """
  fcn = None
  if hasattr( self, method ) and callable( getattr( self, method ) ):
    fcn = getattr( self, method )
  if not fcn:
    return S_ERROR( DErrno.ENOMETH, "Unable to invoke %s, it isn't a member funtion of SRM2Storage" % method )
  res = fcn( url )
  if not res['OK']:
    return res
  elif url not in res['Value']['Successful']:
    if url not in res['Value']['Failed']:
      # some SEs key the result on a slightly different url string;
      # fall back to the first (and presumably only) entry of either dict
      if res['Value']['Failed'].values():
        return S_ERROR( res['Value']['Failed'].values()[0] )
      elif res['Value']['Successful'].values():
        return S_OK( res['Value']['Successful'].values()[0] )
      else:
        self.log.error( 'Wrong Return structure', str( res['Value'] ) )
        return S_ERROR( 'Wrong Return structure' )
    return S_ERROR( res['Value']['Failed'][url] )
  return S_OK( res['Value']['Successful'][url] )
############################################################################################
#
# Directory based methods
#
def isDirectory( self, path ):
  """ isdir on storage path

  :param self: self reference
  :param str path: SE path
  :return: S_OK( { 'Failed' : ..., 'Successful' : { surl : bool } } )
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  self.log.debug( "SRM2Storage.isDirectory: Checking whether %s path(s) are directory(ies)" % len( urls ) )
  # depth 0: stat the paths themselves, do not list their content
  resDict = self.__gfal_ls_wrapper( urls, 0 )
  if not resDict["OK"]:
    self.log.error( "Failed isDirectory:", "%s" % resDict["Message"] )
    return resDict
  resDict = resDict["Value"]
  failed = resDict['Failed']
  listOfResults = resDict['AllResults']
  successful = {}
  for urlDict in listOfResults:
    if urlDict.get( 'surl' ):
      dirSURL = urlDict['surl']
      if urlDict['status'] == 0:
        # status 0: stat succeeded — inspect the parsed mode bits
        statDict = self.__parse_file_metadata( urlDict )
        if statDict['Directory']:
          successful[dirSURL] = True
        else:
          self.log.debug( "SRM2Storage.isDirectory: Path is not a directory: %s" % dirSURL )
          successful[dirSURL] = False
      elif urlDict['status'] == 2:
        # status 2: no such file or directory
        self.log.debug( "SRM2Storage.isDirectory: Supplied path does not exist: %s" % dirSURL )
        failed[dirSURL] = S_ERROR( errno.ENOENT, '%s path does not exist' % dirSURL )
      else:
        errStr = "SRM2Storage.isDirectory: Failed to get file metadata."
        errMessage = urlDict['ErrorMessage']
        self.log.error( errStr, "%s: %s" % ( dirSURL, errMessage ) )
        failed[dirSURL] = S_ERROR( DErrno.EGFAL, "Failed to get file metadata %s" % errMessage )
    else:
      # a result without a surl means the gfal answer is unusable — abort
      errStr = "SRM2Storage.isDirectory: Returned element does not contain surl."
      self.log.fatal( errStr, self.name )
      return S_ERROR( errno.ENOMSG, errStr )
  return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def getDirectoryMetadata( self, path ):
  """ get the metadata for the directory :path:

  :param self: self reference
  :param str path: SE path
  :return: S_OK( { 'Failed' : ..., 'Successful' : { surl : statDict } } )
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  self.log.debug( "getDirectoryMetadata: Attempting to obtain metadata for %s directories." % len( urls ) )
  # depth 0: stat the paths themselves, do not list their content
  resDict = self.__gfal_ls_wrapper( urls, 0 )
  if not resDict["OK"]:
    self.log.error( "Failed getDirectoryMetadata:", "%s" % resDict["Message"] )
    return resDict
  resDict = resDict["Value"]
  failed = resDict['Failed']
  listOfResults = resDict['AllResults']
  successful = {}
  for urlDict in listOfResults:
    if "surl" in urlDict and urlDict["surl"]:
      pathSURL = urlDict['surl']
      if urlDict['status'] == 0:
        # status 0: stat succeeded — the path must actually be a directory
        statDict = self.__parse_file_metadata( urlDict )
        if statDict['Directory']:
          statDict['Exists'] = True
          statDict['Type'] = 'Directory'
          successful[pathSURL] = statDict
        else:
          errStr = "SRM2Storage.getDirectoryMetadata: Supplied path is not a directory."
          self.log.error( errStr, pathSURL )
          failed[pathSURL] = errStr
      elif urlDict['status'] == 2:
        # status 2: no such file or directory
        errMessage = "SRM2Storage.getDirectoryMetadata: Directory does not exist."
        self.log.error( errMessage, pathSURL )
        failed[pathSURL] = S_ERROR( errno.ENOENT, 'SRM2Storage.getDirectoryMetadata: %s does not exist' % pathSURL )
      else:
        errStr = "SRM2Storage.getDirectoryMetadata: Failed to get directory metadata."
        errMessage = urlDict['ErrorMessage']
        self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
        failed[pathSURL] = S_ERROR( DErrno.EGFAL, "Failed to get file metadata %s" % errMessage )
    else:
      # a result without a surl means the gfal answer is unusable — abort
      errStr = "SRM2Storage.getDirectoryMetadata: Returned element does not contain surl."
      self.log.fatal( errStr, self.name )
      return S_ERROR( errno.ENOMSG, errStr )
  return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def getDirectorySize( self, path ):
  """ Get the size of the directory on the storage

  Per successfully listed directory, reports the number of files, their
  cumulative size in bytes and the number of direct sub-directories.
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  self.log.debug( "SRM2Storage.getDirectorySize: Attempting to get size of %s directories." % len( urls ) )
  res = self.listDirectory( urls )
  if not res['OK']:
    return res
  successful = {}
  for directory, dirDict in res['Value']['Successful'].items():
    # aggregate the sizes of the files directly inside this directory
    sizes = [ fileDict['Size'] for fileDict in dirDict['Files'].itervalues() ]
    self.log.debug( "SRM2Storage.getDirectorySize: Successfully obtained size of %s." % directory )
    successful[directory] = { 'Files' : len( sizes ),
                              'Size' : sum( sizes ),
                              'SubDirs' : len( dirDict['SubDirs'] ) }
  return S_OK( { 'Failed' : res['Value']['Failed'], 'Successful' : successful } )
def listDirectory( self, path, internalCall = False ):
  """ List the contents of the directory on the storage

  :param internalCall: if this method is called from within
     that class, we should return index on SURL, not LFNs
     Do not set it to True for a normal call, unless you really
     know what you are doing !!
  :return: S_OK( { 'Failed' : ..., 'Successful' : { dir : { 'SubDirs' : ..., 'Files' : ... } } } )
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  self.log.debug( "SRM2Storage.listDirectory: Attempting to list %s directories." % len( urls ) )
  # The gfal method returns an url, while we want to return an LFN to the user
  urlStart = self.getURLBase( withWSUrl = True )['Value']
  # only proceed with paths that actually are directories
  res = self.isDirectory( urls )
  if not res['OK']:
    return res
  failed = res['Value']['Failed']
  directories = {}
  for url, isDirectory in res['Value']['Successful'].items():
    if isDirectory:
      directories[url] = False
    else:
      errStr = "SRM2Storage.listDirectory: Directory does not exist."
      self.log.error( errStr, url )
      failed[url] = errStr
  resDict = self.__gfal_lsdir_wrapper( directories )
  if not resDict["OK"]:
    self.log.error( "Failed listDirectory:", "%s" % resDict["Message"] )
    return resDict
  resDict = resDict["Value"]
  # resDict = self.__gfalls_wrapper(directories,1)['Value']
  failed.update( resDict['Failed'] )
  listOfResults = resDict['AllResults']
  successful = {}
  for urlDict in listOfResults:
    if "surl" in urlDict and urlDict["surl"]:
      pathSURL = urlDict['surl']
      if urlDict['status'] == 0:
        successful[pathSURL] = {}
        self.log.debug( "SRM2Storage.listDirectory: Successfully listed directory %s" % pathSURL )
        subPathDirs = {}
        subPathFiles = {}
        if "subpaths" in urlDict:
          subPaths = urlDict['subpaths']
          # Parse the subpaths for the directory
          for subPathDict in subPaths:
            subPathSURL = subPathDict['surl']
            if subPathDict['status'] == 22:
              # status 22: invalid entry — log and skip it
              self.log.error( "File found with status 22", subPathDict )
            elif subPathDict['status'] == 0:
              statDict = self.__parse_file_metadata( subPathDict )
              # Replace the URL with an LFN in normal cases, but return the SURL if it is an internal call
              subPathLFN = subPathSURL if internalCall else subPathSURL.replace( urlStart, '' )
              if statDict['File']:
                subPathFiles[subPathLFN] = statDict
              elif statDict['Directory']:
                subPathDirs[subPathLFN] = statDict
        # Keep the infomation about this path's subpaths
        successful[pathSURL]['SubDirs'] = subPathDirs
        successful[pathSURL]['Files'] = subPathFiles
      else:
        errStr = "SRM2Storage.listDirectory: Failed to list directory."
        errMessage = urlDict['ErrorMessage']
        self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
        failed[pathSURL] = "%s %s" % ( errStr, errMessage )
    else:
      # a result without a surl means the gfal answer is unusable — abort
      errStr = "SRM2Storage.listDirectory: Returned element does not contain surl."
      self.log.fatal( errStr, self.name )
      return S_ERROR( errno.ENOMSG, errStr )
  return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def putDirectory( self, path ):
  """ cp -R local SE

  puts a local directory to the physical storage together with all its files and subdirectories

  :param self: self reference
  :param str path: local fs path
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  successful = {}
  failed = {}
  self.log.debug( "SRM2Storage.putDirectory: Attemping to put %s directories to remote storage." % len( urls ) )
  for destDir, sourceDir in urls.items():
    res = self.__putDir( sourceDir, destDir )
    if not res['OK']:
      self.log.error( "SRM2Storage.putDirectory: Completely failed to put directory to remote storage.", destDir )
      failed[destDir] = { "Files" : 0, "Size" : 0 }
      continue
    # partial and complete puts both carry the files/size counters
    counters = { 'Files' : res['Value']['Files'], 'Size' : res['Value']['Size']}
    if res['Value']['AllPut']:
      self.log.debug( "SRM2Storage.putDirectory: Successfully put directory to remote storage: %s" % destDir )
      successful[destDir] = counters
    else:
      self.log.error( "SRM2Storage.putDirectory: Failed to put entire directory to remote storage.", destDir )
      failed[destDir] = counters
  return S_OK( { "Failed" : failed, "Successful" : successful } )
def __putDir( self, src_directory, dest_directory ):
  """ Recursively upload local directory :src_directory: to :dest_directory:.

  :param self: self reference
  :param str src_directory: local fs directory
  :param str dest_directory: destination SE path
  :return: S_OK( { 'AllPut' : bool, 'Files' : int, 'Size' : int } )
  """
  filesPut = 0
  sizePut = 0
  # Check the local directory exists
  if not os.path.isdir( src_directory ):
    errStr = "SRM2Storage.__putDir: The supplied directory does not exist."
    self.log.error( errStr, src_directory )
    return S_ERROR( errno.ENOENT, errStr )
  # Get the local directory contents
  contents = os.listdir( src_directory )
  allSuccessful = True
  directoryFiles = {}
  for fileName in contents:
    localPath = '%s/%s' % ( src_directory, fileName )
    remotePath = '%s/%s' % ( dest_directory, fileName )
    if not os.path.isdir( localPath ):
      # plain files are collected and uploaded in one bulk putFile call
      directoryFiles[remotePath] = localPath
    else:
      res = self.__putDir( localPath, remotePath )
      if not res['OK']:
        errStr = "SRM2Storage.__putDir: Failed to put directory to storage."
        self.log.error( errStr, res['Message'] )
        # BUGFIX: a completely failed sub-directory must mark the put partial
        allSuccessful = False
      else:
        if not res['Value']['AllPut']:
          # BUGFIX: this used to set a never-read local 'pathSuccessful', so
          # partially transferred sub-directories were reported as fully put
          allSuccessful = False
        filesPut += res['Value']['Files']
        sizePut += res['Value']['Size']
  if directoryFiles:
    res = self.putFile( directoryFiles )
    if not res['OK']:
      self.log.error( "SRM2Storage.__putDir: Failed to put files to storage.", res['Message'] )
      allSuccessful = False
    else:
      for fileSize in res['Value']['Successful'].itervalues():
        filesPut += 1
        sizePut += fileSize
      if res['Value']['Failed']:
        allSuccessful = False
  return S_OK( { 'AllPut' : allSuccessful, 'Files' : filesPut, 'Size' : sizePut } )
def getDirectory( self, path, localPath = False ):
  """ Get a local copy in the current directory of a physical file specified by its path
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  failed = {}
  successful = {}
  self.log.debug( "SRM2Storage.getDirectory: Attempting to get local copies of %s directories." % len( urls ) )
  for src_dir in urls:
    # target directory keeps the remote base name under localPath (or cwd)
    baseName = os.path.basename( src_dir )
    targetDir = "%s/%s" % ( localPath if localPath else os.getcwd(), baseName )
    res = self.__getDir( src_dir, targetDir )
    if not res['OK']:
      self.log.error( "SRM2Storage.getDirectory: Completely failed to get local copy of directory.", src_dir )
      failed[src_dir] = {'Files':0, 'Size':0}
      continue
    counters = {'Files':res['Value']['Files'], 'Size':res['Value']['Size']}
    if res['Value']['AllGot']:
      self.log.debug( "SRM2Storage.getDirectory: Successfully got local copy of %s" % src_dir )
      successful[src_dir] = counters
    else:
      self.log.error( "SRM2Storage.getDirectory: Failed to get entire directory.", src_dir )
      failed[src_dir] = counters
  return S_OK( {'Failed' : failed, 'Successful' : successful } )
def __getDir( self, srcDirectory, destDirectory ):
  """ Recursively download SE directory :srcDirectory: into local :destDirectory:.

  :param self: self reference
  :param str srcDirectory: source SE path
  :param str destDirectory: local fs destination
  :return: S_OK( { 'AllGot' : bool, 'Files' : int, 'Size' : int } )
  """
  filesGot = 0
  sizeGot = 0
  # Check the remote directory exists
  res = self.__executeOperation( srcDirectory, 'isDirectory' )
  if not res['OK']:
    self.log.error( "SRM2Storage.__getDir: Failed to find the supplied source directory.", srcDirectory )
    return res
  if not res['Value']:
    errStr = "SRM2Storage.__getDir: The supplied source path is not a directory."
    self.log.error( errStr, srcDirectory )
    return S_ERROR( errno.ENOTDIR, errStr )
  # Check the local directory exists and create it if not
  if not os.path.exists( destDirectory ):
    os.makedirs( destDirectory )
  # Get the remote directory contents
  res = self.__getDirectoryContents( srcDirectory )
  if not res['OK']:
    errStr = "SRM2Storage.__getDir: Failed to list the source directory."
    self.log.error( errStr, srcDirectory )
    # BUGFIX: previously fell through and raised a KeyError on res['Value']
    return S_ERROR( errStr )
  filesToGet = res['Value']['Files']
  subDirs = res['Value']['SubDirs']
  allSuccessful = True
  res = self.getFile( filesToGet.keys(), destDirectory )
  if not res['OK']:
    self.log.error( "SRM2Storage.__getDir: Failed to get files from storage.", res['Message'] )
    allSuccessful = False
  else:
    for fileSize in res['Value']['Successful'].itervalues():
      filesGot += 1
      sizeGot += fileSize
    if res['Value']['Failed']:
      allSuccessful = False
  # recurse into the sub-directories
  for subDir in subDirs:
    subDirName = os.path.basename( subDir )
    localPath = '%s/%s' % ( destDirectory, subDirName )
    res = self.__getDir( subDir, localPath )
    if not res['OK']:
      # BUGFIX: a completely failed sub-directory must mark the get partial
      allSuccessful = False
    else:
      if not res['Value']['AllGot']:
        # BUGFIX: this used to (re)set allSuccessful to True, masking failures
        allSuccessful = False
      filesGot += res['Value']['Files']
      sizeGot += res['Value']['Size']
  return S_OK( { 'AllGot' : allSuccessful, 'Files' : filesGot, 'Size' : sizeGot } )
def removeDirectory( self, path, recursive = False ):
  """ Remove a directory

  :param str path: SE path(s) to remove
  :param bool recursive: when True, also delete all contained files and sub-directories
  """
  # dispatch to the recursive or flat implementation
  if not recursive:
    return self.__removeDirectory( path )
  return self.__removeDirectoryRecursive( path )
def __removeDirectory( self, directory ):
  """ This function removes the directory on the storage

  Non-recursive: only the directory entries themselves are removed.
  A directory that does not exist is counted as successfully removed.
  """
  res = checkArgumentFormat( directory )
  if not res['OK']:
    return res
  urls = res['Value']
  self.log.debug( "SRM2Storage.__removeDirectory: Attempting to remove %s directories." % len( urls ) )
  resDict = self.__gfal_removedir_wrapper( urls )
  if not resDict["OK"]:
    self.log.error( "Failed __removeDirectory:", "%s" % resDict["Message"] )
    return resDict
  resDict = resDict["Value"]
  failed = resDict['Failed']
  allResults = resDict['AllResults']
  successful = {}
  for urlDict in allResults:
    if "surl" in urlDict:
      pathSURL = urlDict['surl']
      if urlDict['status'] == 0:
        self.log.debug( "__removeDirectory: Successfully removed directory: %s" % pathSURL )
        successful[pathSURL] = True
      elif urlDict['status'] == 2:
        # This is the case where the file doesn't exist.
        self.log.debug( "__removeDirectory: Directory did not exist, sucessfully removed: %s" % pathSURL )
        successful[pathSURL] = True
      else:
        errStr = "removeDirectory: Failed to remove directory."
        errMessage = urlDict['ErrorMessage']
        self.log.error( errStr, "%s: %s" % ( pathSURL, errMessage ) )
        failed[pathSURL] = "%s %s" % ( errStr, errMessage )
  return S_OK( { 'Failed' : failed, 'Successful' : successful } )
def __removeDirectoryRecursive( self, directory ):
  """ Recursively removes the directory and sub dirs. Repeatedly calls itself to delete recursively.

  The per-directory result dict accumulates 'FilesRemoved' and 'SizeRemoved'
  counters even when the overall removal is only partial.
  """
  res = checkArgumentFormat( directory )
  if not res['OK']:
    return res
  urls = res['Value']
  successful = {}
  failed = {}
  self.log.debug( "SRM2Storage.__removeDirectory: Attempting to recursively remove %s directories." % len( urls ) )
  for directory in urls:
    self.log.debug( "SRM2Storage.removeDirectory: Attempting to remove %s" % directory )
    res = self.__getDirectoryContents( directory )
    resDict = {'FilesRemoved':0, 'SizeRemoved':0}
    if not res['OK']:
      # could not even list the directory: nothing removed
      failed[directory] = resDict
    else:
      filesToRemove = res['Value']['Files']
      subDirs = res['Value']['SubDirs']
      # Remove all the files in the directory
      res = self.__removeDirectoryFiles( filesToRemove )
      resDict['FilesRemoved'] += res['FilesRemoved']
      resDict['SizeRemoved'] += res['SizeRemoved']
      allFilesRemoved = res['AllRemoved']
      # Remove all the sub-directories
      res = self.__removeSubDirectories( subDirs )
      resDict['FilesRemoved'] += res['FilesRemoved']
      resDict['SizeRemoved'] += res['SizeRemoved']
      allSubDirsRemoved = res['AllRemoved']
      # If all the files and sub-directories are removed then remove the directory
      allRemoved = False
      if allFilesRemoved and allSubDirsRemoved:
        self.log.debug( "SRM2Storage.removeDirectory: Successfully removed all files and sub-directories." )
        res = self.__removeDirectory( directory )
        if res['OK']:
          if directory in res['Value']['Successful']:
            self.log.debug( "SRM2Storage.removeDirectory: Successfully removed the directory %s." % directory )
            allRemoved = True
      # Report the result
      if allRemoved:
        successful[directory] = resDict
      else:
        failed[directory] = resDict
  return S_OK ( { 'Failed' : failed, 'Successful' : successful } )
def __getDirectoryContents( self, directory ):
  """ ls of storage element :directory:

  :param self: self reference
  :param str directory: SE path
  :return: S_OK( { 'Files' : { surl : size }, 'SubDirs' : [ surl, ... ] } )
  """
  directory = directory.rstrip( '/' )
  errMessage = "SRM2Storage.__getDirectoryContents: Failed to list directory."
  # internalCall=True keeps SURLs as keys instead of converting them to LFNs
  res = self.listDirectory( directory, internalCall = True )
  if not res['OK']:
    self.log.error( errMessage, res['Message'] )
    return res
  if directory in res['Value']['Failed']:
    self.log.error( errMessage, res['Value']['Failed'][directory] )
    return S_ERROR( errMessage )
  dirContents = res['Value']['Successful'][directory]
  filesToRemove = {}
  for url, metadata in dirContents['Files'].items():
    filesToRemove[url] = metadata['Size']
  return S_OK ( { 'Files' : filesToRemove, 'SubDirs' : dirContents['SubDirs'].keys() } )
def __removeDirectoryFiles( self, filesToRemove ):
  """ rm files from SE

  :param self: self reference
  :param dict filesToRemove: dict with surls as keys and sizes as values
  :return: dict with 'FilesRemoved', 'SizeRemoved' and 'AllRemoved' keys
  """
  resDict = { 'FilesRemoved' : 0, 'SizeRemoved' : 0, 'AllRemoved' : True }
  if filesToRemove:
    res = self.removeFile( filesToRemove.keys() )
    if res['OK']:
      # credit each successfully removed surl with its recorded size
      for removedSurl in res['Value']['Successful']:
        resDict['FilesRemoved'] += 1
        resDict['SizeRemoved'] += filesToRemove[removedSurl]
      if res['Value']['Failed']:
        resDict['AllRemoved'] = False
  self.log.debug( "SRM2Storage.__removeDirectoryFiles:",
                  "Removed %s files of size %s bytes." % ( resDict['FilesRemoved'], resDict['SizeRemoved'] ) )
  return resDict
def __removeSubDirectories( self, subDirectories ):
  """ rm -rf sub-directories

  :param self: self reference
  :param dict subDirectories: dict with surls as keys
  :return: dict with 'FilesRemoved', 'SizeRemoved' and 'AllRemoved' keys
  """
  resDict = { 'FilesRemoved' : 0, 'SizeRemoved' : 0, 'AllRemoved' : True }
  if len( subDirectories ) == 0:
    return resDict
  res = self.__removeDirectoryRecursive( subDirectories )
  if res['OK']:
    # both fully and partially removed sub-directories carry removal counters
    for outcome in ( 'Successful', 'Failed' ):
      for removedSubDir, removedDict in res['Value'][outcome].items():
        resDict['FilesRemoved'] += removedDict['FilesRemoved']
        resDict['SizeRemoved'] += removedDict['SizeRemoved']
        self.log.debug( "SRM2Storage.__removeSubDirectories:",
                        "Removed %s files of size %s bytes from %s." % ( removedDict['FilesRemoved'],
                                                                         removedDict['SizeRemoved'],
                                                                         removedSubDir ) )
    if len( res['Value']['Failed'] ) != 0:
      resDict['AllRemoved'] = False
  return resDict
@staticmethod
def __parse_stat( stat ):
  """ get size, ftype and mode from stat struct

  :param stat: stat struct
  :return: dict with 'File', 'Directory', 'Mode' and (for files) 'Size'
  """
  # classify the entry from its mode bits: regular file vs directory
  statDict = { 'File' : S_ISREG( stat[ST_MODE] ),
               'Directory' : S_ISDIR( stat[ST_MODE] ) }
  if statDict['File']:
    statDict['Size'] = stat[ST_SIZE]
  # permission bits only, stripped of the file-type part
  statDict['Mode'] = S_IMODE( stat[ST_MODE] )
  return statDict
def __parse_file_metadata( self, urlDict ):
  """ parse and save bits and pieces of metadata info

  :param self: self reference
  :param urlDict: gfal call results
  :return: stat dict augmented with checksum/locality flags and common metadata
  """
  statDict = self.__parse_stat( urlDict['stat'] )
  if statDict['File']:
    statDict.setdefault( "Checksum", "" )
    # '0x' is gfal's "no checksum available" marker
    if "checksum" in urlDict and ( urlDict['checksum'] != '0x' ):
      statDict["Checksum"] = urlDict["checksum"]
    if 'locality' in urlDict:
      # SRM locality string, e.g. 'ONLINE', 'NEARLINE', 'ONLINE_AND_NEARLINE'
      urlLocality = urlDict['locality']
      if re.search( 'ONLINE', urlLocality ):
        statDict['Cached'] = 1
      else:
        statDict['Cached'] = 0
      if re.search( 'NEARLINE', urlLocality ):
        statDict['Migrated'] = 1
      else:
        statDict['Migrated'] = 0
      statDict['Lost'] = 0
      if re.search( 'LOST', urlLocality ):
        statDict['Lost'] = 1
      statDict['Unavailable'] = 0
      if re.search( 'UNAVAILABLE', urlLocality ):
        statDict['Unavailable'] = 1
      # accessible only if cached and neither lost nor unavailable
      statDict['Accessible'] = not statDict['Lost'] and statDict['Cached'] and not statDict['Unavailable']
    else:
      # no locality information: assume the worst
      statDict['Cached'] = 0
      statDict['Migrated'] = 0
      statDict['Lost'] = 0
      statDict['Unavailable'] = 1
      statDict['Accessible'] = False
  return self._addCommonMetadata( statDict )
def __getProtocols( self ):
  """ returns list of protocols to use at a given site

  :warn: priority is given to a protocols list defined in the CS
  :param self: self reference
  """
  sections = gConfig.getSections( '/Resources/StorageElements/%s/' % ( self.name ) )
  if not sections['OK']:
    return sections
  protocolsList = []
  # a ProtocolsList defined for this plugin in the SE's CS section wins
  for section in sections['Value']:
    pluginPath = '/Resources/StorageElements/%s/%s/PluginName' % ( self.name, section )
    if gConfig.getValue( pluginPath, '' ) != self.pluginName:
      continue
    protPath = '/Resources/StorageElements/%s/%s/ProtocolsList' % ( self.name, section )
    siteProtocols = gConfig.getValue( protPath, [] )
    if siteProtocols:
      self.log.debug( 'Found SE protocols list to override defaults:', ', '.join( siteProtocols, ) )
      protocolsList = siteProtocols
  if not protocolsList:
    # fall back to the global default protocol list
    self.log.debug( "SRM2Storage.getTransportURL: No protocols provided, using defaults." )
    protocolsList = gConfig.getValue( '/Resources/StorageElements/DefaultProtocols', [] )
  if not protocolsList:
    return S_ERROR( DErrno.ECONF, "SRM2Storage.getTransportURL: No local protocols defined and no defaults found" )
  return S_OK( protocolsList )
#######################################################################
#
# These methods wrap the gfal functionality with the accounting. All these are based on __gfal_operation_wrapper()
#
#######################################################################
def __gfal_lsdir_wrapper( self, urls ):
  """ This is a hack because the structures returned by the different SEs are different

  Pages through each directory listing 'step' entries at a time using
  srmv2_lsoffset/srmv2_lscount, flattening the per-SE result shapes into a
  single { 'surl', 'status', 'subpaths' } record per directory.
  """
  # page size for the paginated srmv2 listing
  step = 200
  gfalDict = {}
  gfalDict['defaultsetype'] = 'srmv2'
  gfalDict['no_bdii_check'] = 1
  gfalDict['srmv2_lslevels'] = 1
  gfalDict['srmv2_lscount'] = step
  failed = {}
  successful = []
  for url in urls:
    allResults = []
    gfalDict['surls'] = [url]
    gfalDict['nbfiles'] = 1
    gfalDict['timeout'] = self.gfalLongTimeOut
    allObtained = False
    iteration = 0
    while not allObtained:
      # advance the listing window by one page per iteration
      gfalDict['srmv2_lsoffset'] = iteration * step
      iteration += 1
      res = self.__gfal_operation_wrapper( 'gfal_ls', gfalDict )
      # gDataStoreClient.addRegister( res['AccountingOperation'] )
      if not res['OK']:
        # some SEs answer SRM_FAILURE when the offset runs past the end
        if re.search( r'\[SE\]\[Ls\]\[SRM_FAILURE\]', res['Message'] ):
          allObtained = True
        else:
          failed[url] = res['Message']
      else:
        results = res['Value']
        tempStep = step
        if len( results ) == 1:
          # single result: either the directory record wrapping its
          # subpaths, or the directory itself echoed back (empty page)
          for result in results:
            if 'subpaths' in result:
              results = result['subpaths']
              tempStep = step - 1
            elif re.search( re.escape( result['surl'] ), url ):
              results = []
        allResults.extend( results )
        # a short page means the listing is exhausted
        if len( results ) < tempStep:
          allObtained = True
    for urlDict in allResults:
      if 'surl' in urlDict:
        urlDict['surl'] = self.__convertRandomSRMOutputIntoAFullURL( urlDict['surl'] )['Value']
    successful.append( { 'surl' : url, 'status' : 0, 'subpaths' : allResults } )
  # gDataStoreClient.commit()
  return S_OK( { "AllResults" : successful, "Failed" : failed } )
def __gfal_ls_wrapper( self, urls, depth ):
  """ gfal_ls wrapper

  :param self: self reference
  :param list urls: urls to check
  :param int depth: srmv2_lslevel (0 or 1)
  """
  gfalDict = { 'defaultsetype' : 'srmv2',
               'no_bdii_check' : 1,
               'srmv2_lslevels' : depth }
  allResults = []
  failed = {}
  # issue one gfal_ls call per chunk of at most self.filesPerCall SURLs
  for chunk in breakListIntoChunks( urls.keys(), self.filesPerCall ):
    gfalDict['surls'] = chunk
    gfalDict['nbfiles'] = len( chunk )
    gfalDict['timeout'] = self.fileTimeout * len( chunk )
    res = self.__gfal_operation_wrapper( 'gfal_ls', gfalDict )
    if res['OK']:
      allResults.extend( res['Value'] )
    else:
      failed.update( dict.fromkeys( chunk, res['Message'] ) )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfal_prestage_wrapper( self, urls, lifetime ):
  """ gfal_prestage wrapper

  :param self: self reference
  :param list urls: urls to prestage
  :param int lifetime: prestage lifetime
  """
  gfalDict = { 'defaultsetype' : 'srmv2',
               'no_bdii_check' : 1,
               'srmv2_spacetokendesc' : self.spaceToken,
               'srmv2_desiredpintime' : lifetime,
               'protocols' : self.defaultLocalProtocols }
  allResults = []
  failed = {}
  # issue one prestage request per chunk of at most self.filesPerCall SURLs
  for chunk in breakListIntoChunks( urls.keys(), self.filesPerCall ):
    gfalDict['surls'] = chunk
    gfalDict['nbfiles'] = len( chunk )
    gfalDict['timeout'] = self.stageTimeout
    res = self.__gfal_operation_wrapper( 'gfal_prestage',
                                         gfalDict,
                                         timeout_sendreceive = self.fileTimeout * len( chunk ) )
    if res['OK']:
      allResults.extend( res['Value'] )
    else:
      failed.update( dict.fromkeys( chunk, res['Message'] ) )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfalturlsfromsurls_wrapper( self, urls, listProtocols ):
  """ This is a function that can be reused everywhere to perform the gfal_turlsfromsurls
  """
  gfalDict = { 'defaultsetype' : 'srmv2',
               'no_bdii_check' : 1,
               'protocols' : listProtocols,
               'srmv2_spacetokendesc' : self.spaceToken }
  allResults = []
  failed = {}
  # resolve TURLs one chunk of at most self.filesPerCall SURLs at a time
  for chunk in breakListIntoChunks( urls.keys(), self.filesPerCall ):
    gfalDict['surls'] = chunk
    gfalDict['nbfiles'] = len( chunk )
    gfalDict['timeout'] = self.fileTimeout * len( chunk )
    res = self.__gfal_operation_wrapper( 'gfal_turlsfromsurls', gfalDict )
    if res['OK']:
      allResults.extend( res['Value'] )
    else:
      failed.update( dict.fromkeys( chunk, res['Message'] ) )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfaldeletesurls_wrapper( self, urls ):
  """ This is a function that can be reused everywhere to perform the gfal_deletesurls
  """
  gfalDict = { 'defaultsetype' : 'srmv2',
               'no_bdii_check' : 1 }
  allResults = []
  failed = {}
  # delete one chunk of at most self.filesPerCall SURLs per gfal call
  for chunk in breakListIntoChunks( urls.keys(), self.filesPerCall ):
    gfalDict['surls'] = chunk
    gfalDict['nbfiles'] = len( chunk )
    gfalDict['timeout'] = self.fileTimeout * len( chunk )
    res = self.__gfal_operation_wrapper( 'gfal_deletesurls', gfalDict )
    if res['OK']:
      allResults.extend( res['Value'] )
    else:
      failed.update( dict.fromkeys( chunk, res['Message'] ) )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfal_removedir_wrapper( self, urls ):
  """ This is a function that can be reused everywhere to perform the gfal_removedir
  """
  gfalDict = { 'defaultsetype' : 'srmv2',
               'no_bdii_check' : 1,
               'srmv2_spacetokendesc' : self.spaceToken }
  allResults = []
  failed = {}
  # remove one chunk of at most self.filesPerCall directories per gfal call
  for chunk in breakListIntoChunks( urls.keys(), self.filesPerCall ):
    gfalDict['surls'] = chunk
    gfalDict['nbfiles'] = len( chunk )
    gfalDict['timeout'] = self.fileTimeout * len( chunk )
    res = self.__gfal_operation_wrapper( 'gfal_removedir', gfalDict )
    if res['OK']:
      allResults.extend( res['Value'] )
    else:
      failed.update( dict.fromkeys( chunk, res['Message'] ) )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfal_pin_wrapper( self, urls, lifetime ):
  """ gfal_pin wrapper

  :param self: self reference
  :param dict urls: dict { url : srmRequestID }
  :param int lifetime: pin lifetime in seconds
  """
  gfalDict = {}
  gfalDict['defaultsetype'] = 'srmv2'
  gfalDict['no_bdii_check'] = 0
  gfalDict['srmv2_spacetokendesc'] = self.spaceToken
  gfalDict['srmv2_desiredpintime'] = lifetime
  allResults = []
  failed = {}
  # regroup SURLs by the SRM request they belong to: a pin call must be
  # issued against the request that brought the file online
  srmRequestFiles = {}
  for url, srmRequestID in urls.items():
    if srmRequestID not in srmRequestFiles:
      srmRequestFiles[srmRequestID] = []
    srmRequestFiles[srmRequestID].append( url )
  for srmRequestID, urls in srmRequestFiles.items():
    # within a request, chunk the SURLs by self.filesPerCall
    listOfLists = breakListIntoChunks( urls, self.filesPerCall )
    for urls in listOfLists:
      gfalDict['surls'] = urls
      gfalDict['nbfiles'] = len( urls )
      gfalDict['timeout'] = self.fileTimeout * len( urls )
      res = self.__gfal_operation_wrapper( 'gfal_pin', gfalDict, srmRequestID = srmRequestID )
      if not res['OK']:
        for url in urls:
          failed[url] = res['Message']
      else:
        allResults.extend( res['Value'] )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfal_prestagestatus_wrapper( self, urls ):
  """ gfal_prestagestatus wrapper

  :param self: self reference
  :param dict urls: dict { url : srmRequestID }
  """
  gfalDict = {}
  gfalDict['defaultsetype'] = 'srmv2'
  gfalDict['no_bdii_check'] = 0
  gfalDict['srmv2_spacetokendesc'] = self.spaceToken
  allResults = []
  failed = {}
  # regroup SURLs by the SRM request they belong to: status must be polled
  # against the request that issued the prestage
  srmRequestFiles = {}
  for url, srmRequestID in urls.items():
    if srmRequestID not in srmRequestFiles:
      srmRequestFiles[srmRequestID] = []
    srmRequestFiles[srmRequestID].append( url )
  for srmRequestID, urls in srmRequestFiles.items():
    # within a request, chunk the SURLs by self.filesPerCall
    listOfLists = breakListIntoChunks( urls, self.filesPerCall )
    for urls in listOfLists:
      gfalDict['surls'] = urls
      gfalDict['nbfiles'] = len( urls )
      gfalDict['timeout'] = self.fileTimeout * len( urls )
      res = self.__gfal_operation_wrapper( 'gfal_prestagestatus', gfalDict, srmRequestID = srmRequestID )
      if not res['OK']:
        for url in urls:
          failed[url] = res['Message']
      else:
        allResults.extend( res['Value'] )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfal_release_wrapper( self, urls ):
  """ gfal_release wrapper

  Groups :urls: by their SRM request ID and releases the pins in chunks of
  at most self.filesPerCall files per gfal call.

  :param self: self reference
  :param dict urls: dict { url : srmRequestID }
  :return: S_OK( { "AllResults" : list of gfal result dicts,
                   "Failed" : { url : error message } } )
  """
  gfalDict = {}
  gfalDict['defaultsetype'] = 'srmv2'
  gfalDict['no_bdii_check'] = 0
  allResults = []
  failed = {}
  # invert the mapping: srmRequestID -> [ url, url, ... ]
  srmRequestFiles = {}
  for url, srmRequestID in urls.items():
    srmRequestFiles.setdefault( srmRequestID, [] ).append( url )
  for srmRequestID, requestURLs in srmRequestFiles.items():
    # chunked so each gfal call (and its timeout) stays bounded
    for chunk in breakListIntoChunks( requestURLs, self.filesPerCall ):
      gfalDict['surls'] = chunk
      gfalDict['nbfiles'] = len( chunk )
      gfalDict['timeout'] = self.fileTimeout * len( chunk )
      res = self.__gfal_operation_wrapper( 'gfal_release', gfalDict, srmRequestID = srmRequestID )
      if not res['OK']:
        # the whole chunk failed with the same message
        for url in chunk:
          failed[url] = res['Message']
      else:
        allResults.extend( res['Value'] )
  return S_OK( { "AllResults" : allResults, "Failed" : failed } )
def __gfal_operation_wrapper( self, operation, gfalDict, srmRequestID = None, timeout_sendreceive = None ):
  """ gfal fcn call wrapper

  Executes :operation: through __gfal_wrapper inside a pythonCall sandbox
  with its own overall timeout, then normalises any 'surl' entries in the
  results into full URLs.

  :param self: self reference
  :param str operation: gfal fcn name
  :param dict gfalDict: gfal dict passed to create gfal object
  :param srmRequestID: srmRequestID
  :param int timeout_sendreceive: gfal sendreceive timeout in seconds
  :return: S_OK( list of gfal result dicts ) or S_ERROR
  """
  # make sure the gfal/lcg_util bindings are loaded before anything else
  res = self.__importExternals()
  if not res['OK']:
    return res
  # # timeout for one gfal_exec call
  timeout = gfalDict['timeout'] if not timeout_sendreceive else timeout_sendreceive
  # # pythonCall timeout ( const + timeout * ( 2 ** retry ) ) — must outlive
  # # the worst case of __gfal_exec doubling the timeout on every retry
  pyTimeout = 300 + ( timeout * ( 2 ** self.gfalRetry ) )
  res = pythonCall( pyTimeout, self.__gfal_wrapper, operation, gfalDict, srmRequestID, timeout_sendreceive )
  if not res['OK']:
    return res
  # unwrap the pythonCall envelope to get __gfal_wrapper's own S_OK/S_ERROR
  res = res['Value']
  if res['OK']:  # pylint: disable=invalid-sequence-index
    # rewrite abbreviated SRM output paths into full URLs in place
    for urlDict in res['Value']:  # pylint: disable=invalid-sequence-index
      if 'surl' in urlDict:
        urlDict['surl'] = self.__convertRandomSRMOutputIntoAFullURL( urlDict['surl'] )['Value']
  return res
def __gfal_wrapper( self, operation, gfalDict, srmRequestID = None, timeout_sendreceive = None ):
  """ execute gfal :operation:

  1. create gfalObject from gfalDict
  2. set srmRequestID
  3. call __gfal_exec
  4. get gfal ids
  5. get gfal results
  6. destroy gfal object

  :param self: self reference
  :param str operation: fcn to call
  :param dict gfalDict: gfal config dict
  :param srmRequestID: srm request id
  :param int timeout_sendreceive: timeout for gfal send request and receive results in seconds
  :return: S_OK( list of result dicts, each tagged with 'SRMReqID' ) or S_ERROR
  """
  gfalObject = self.__create_gfal_object( gfalDict )
  if not gfalObject["OK"]:
    return gfalObject
  gfalObject = gfalObject['Value']
  # attach the existing SRM request ID, if the caller supplied one
  if srmRequestID:
    res = self.__gfal_set_ids( gfalObject, srmRequestID )
    if not res['OK']:
      return res
  res = self.__gfal_exec( gfalObject, operation, timeout_sendreceive )
  if not res['OK']:
    return res
  gfalObject = res['Value']
  res = self.__gfal_get_ids( gfalObject )
  if not res['OK']:
    # gfal returned no new token: fall back to the caller-supplied ID
    newSRMRequestID = srmRequestID
  else:
    newSRMRequestID = res['Value']
  res = self.__get_results( gfalObject )
  if not res['OK']:
    return res
  # tag every per-file result with the request ID it belongs to
  resultList = []
  pfnRes = res['Value']
  for myDict in pfnRes:
    myDict['SRMReqID'] = newSRMRequestID
    resultList.append( myDict )
  self.__destroy_gfal_object( gfalObject )
  return S_OK( resultList )
#######################################################################
#
# The following methods provide the interaction with gfal functionality
#
#######################################################################
def __create_gfal_object( self, gfalDict ):
  """ Instantiate a gfal object via gfal.gfal_init.

  :param self: self reference
  :param dict gfalDict: gfal params dict
  :return: S_OK( gfalObject ) on success, S_ERROR( errno, message ) otherwise
  """
  self.log.debug( "SRM2Storage.__create_gfal_object: Performing gfal_init." )
  errCode, gfalObject, errMessage = self.gfal.gfal_init( gfalDict )
  if errCode == 0:
    # success path: hand back the freshly created object
    self.log.debug( "SRM2Storage.__create_gfal_object: Successfully performed gfal_init." )
    return S_OK( gfalObject )
  # fall back to the system error string when gfal gave no explanation
  if not errMessage:
    errMessage = os.strerror( self.gfal.gfal_get_errno() )
  self.log.error( "SRM2Storage.__create_gfal_object: Failed to perform gfal_init.", errMessage )
  return S_ERROR( self.gfal.gfal_get_errno(), errMessage )
def __gfal_set_ids( self, gfalObject, srmRequestID ):
  """ Attach :srmRequestID: to :gfalObject: via gfal.gfal_set_ids.

  :param self: self reference
  :param gfalObject: gfal object
  :param str srmRequestID: srm request id
  :return: S_OK( gfalObject ) on success, S_ERROR( errCode, message ) otherwise
  """
  self.log.debug( "SRM2Storage.__gfal_set_ids: Performing gfal_set_ids." )
  errCode, gfalObject, errMessage = self.gfal.gfal_set_ids( gfalObject, None, 0, str( srmRequestID ) )
  if errCode == 0:
    # success path: return the (possibly updated) gfal object
    self.log.debug( "SRM2Storage.__gfal_set_ids: Successfully performed gfal_set_ids." )
    return S_OK( gfalObject )
  # fall back to the system error string when gfal gave no explanation
  errMessage = errMessage if errMessage else os.strerror( errCode )
  self.log.error( "SRM2Storage.__gfal_set_ids: Failed to perform gfal_set_ids.", errMessage )
  return S_ERROR( errCode, errMessage )
def __gfal_exec( self, gfalObject, method, timeout_sendreceive = None ):
  """ Invoke gfal :method: on :gfalObject: with retries and adaptive timeouts.

  In gfal, for every method (synchronous or asynchronous), you can define a sendreceive timeout and a connect timeout.
  The connect timeout sets the maximum amount of time a client accepts to wait before establishing a successful TCP
  connection to SRM (default 60 seconds).
  The sendreceive timeout, allows a client to set the maximum time the send
  of a request to SRM can take (normally all send operations return immediately unless there is no free TCP buffer)
  and the maximum time to receive a reply (a token for example). Default 0, i.e. no timeout.
  The srm timeout for asynchronous requests default to 3600 seconds

  gfal_set_timeout_connect (int value)
  gfal_set_timeout_sendreceive (int value)
  gfal_set_timeout_bdii (int value)
  gfal_set_timeout_srm (int value)

  :param self: self reference
  :param gfalObject: gfal object
  :param str method: name of the gfal module-level function to call
  :param int timeout_sendreceive: initial sendreceive timeout in seconds
    (falls back to self.gfalTimeout)
  :return: S_OK( gfalObject ) or S_ERROR( errCode, message )
  """
  self.log.debug( "SRM2Storage.__gfal_exec(%s): Starting" % method )
  # resolve the gfal function by name; refuse anything that is not callable
  fcn = None
  if hasattr( self.gfal, method ) and callable( getattr( self.gfal, method ) ):
    fcn = getattr( self.gfal, method )
  if not fcn:
    return S_ERROR( DErrno.ENOMETH, "%s is not a member function of gfal" % method )
  # # retry count (at least one attempt)
  retry = self.gfalRetry if self.gfalRetry else 1
  # # initial timeout
  timeout = timeout_sendreceive if timeout_sendreceive else self.gfalTimeout
  # # errCode, errMessage, errNo
  errCode, errMessage, errNo = 0, "", 0
  for _i in range( retry ):
    self.gfal.gfal_set_timeout_sendreceive( timeout )
    errCode, gfalObject, errMessage = fcn( gfalObject )
    if not errCode:
      break
    errNo = self.gfal.gfal_get_errno()
    # on a communication error, double the timeout before the next attempt;
    # other errors are retried with the same timeout
    if errCode == -1 and errNo == errno.ECOMM:
      timeout *= 2
      self.log.debug( "SRM2Storage.__gfal_exec(%s): got ECOMM, extending timeout to %s s" % ( method, timeout ) )
  if errCode:
    # all attempts failed: report the last error seen
    errStr = "SRM2Storage.__gfal_exec(%s): Execution failed." % method
    if not errMessage:
      errMessage = os.strerror( errNo ) if errNo else "UNKNOWN ERROR"
    self.log.error( errStr, errMessage )
    return S_ERROR( errCode, errMessage )
  self.log.debug( "SRM2Storage.__gfal_exec(%s): Successfully invoked." % method )
  return S_OK( gfalObject )
def __get_results( self, gfalObject ):
  """ Retrieve gfal results and annotate failed entries with an error message.

  :param self: self reference
  :param gfalObject: gfal object
  :return: S_OK( list of per-file result dicts ) or S_ERROR when gfal
    returned no results
  """
  self.log.debug( "SRM2Storage.__get_results: Performing gfal_get_results" )
  numberOfResults, gfalObject, listOfResults = self.gfal.gfal_get_results( gfalObject )
  if numberOfResults <= 0:
    errObj = S_ERROR( DErrno.EGFAL, "SRM2Storage.__get_results: Did not obtain results with gfal_get_results." )
    self.log.error( errObj )
    return errObj
  else:
    self.log.debug( "SRM2Storage.__get_results: Retrieved %s results from gfal_get_results." % numberOfResults )
    for result in listOfResults:
      if result['status'] != 0:
        # fix: errMessage was previously unbound (or stale from an earlier
        # iteration) when 'explanation' was empty and 'status' was negative
        errMessage = "Unknown error"
        if result['explanation']:
          errMessage = result['explanation']
        elif result['status'] > 0:
          errMessage = os.strerror( result['status'] )
        result['ErrorMessage'] = errMessage
    return S_OK( listOfResults )
def __gfal_get_ids( self, gfalObject ):
  """ Extract the SRM request token from :gfalObject:.

  :param self: self reference
  :param gfalObject: gfalObject
  :return: S_OK( srmRequestToken ) or S_ERROR when gfal reports no results
  """
  self.log.debug( "SRM2Storage.__gfal_get_ids: Performing gfal_get_ids." )
  numberOfResults, gfalObject, _srm1RequestID, _srm1FileIDs, srmRequestToken = self.gfal.gfal_get_ids( gfalObject )
  if numberOfResults > 0:
    # success path: hand back the SRMv2 request token
    self.log.debug( "SRM2Storage.__get_gfal_ids: Retrieved SRM request ID %s." % srmRequestToken )
    return S_OK( srmRequestToken )
  errObj = S_ERROR( DErrno.EGFAL, "__gfal_get_ids could not obtain request ID" )
  self.log.error( errObj )
  return errObj
def __destroy_gfal_object( self, gfalObject ):
  """ Free :gfalObject: through gfal.gfal_internal_free.

  :param self: self reference
  :param gfalObject: gfalObject
  :return: S_OK() always (gfal_internal_free's return value is ignored)
  """
  self.log.debug( "SRM2Storage.__destroy_gfal_object: Performing gfal_internal_free." )
  freeFcn = self.gfal.gfal_internal_free
  freeFcn( gfalObject )
  return S_OK()
|
andresailer/DIRAC
|
Resources/Storage/SRM2Storage.py
|
Python
|
gpl-3.0
| 82,153
|
[
"DIRAC"
] |
8827c4db553d09e99476f299f4104909f644ea6d1425774940f744de7974ac2d
|
#!/usr/bin/env python
import os
try:
__IPYTHON__
import sys
del sys.argv[1:]
except:
pass
import srwl_bl
import srwlib
import srwlpy
import math
import srwl_uti_smp
def set_optics(v, names=None, want_final_propagation=True):
    """Assemble the SRW beamline container for this simulation.

    Each entry of *names* selects one optical element (aperture, drift,
    spherical mirror, crystal, CRL, lens or a pass-through watchpoint);
    the element parameters are read from the corresponding ``v.op_<name>_*``
    attributes of *v* (the srwl_bl variable namespace).

    :param v: simulation variable namespace (as built from ``varParam``)
    :param names: ordered list of element names to assemble; when falsy,
        the full beamline from 'S1' to 'At_Sample' is used
    :param want_final_propagation: if True, append the final propagation
        parameters ``v.op_fin_pp``
    :return: ``srwlib.SRWLOptC`` containing the elements and their
        propagation parameters
    """
    el = []  # optical elements, in beam order
    pp = []  # propagation parameters, one entry per appended element
    if not names:
        names = ['S1', 'S1_HCM', 'HCM', 'HCM_DCM_C1', 'DCM_C1', 'DCM_C2', 'DCM_C2_HFM', 'HFM', 'After_HFM', 'After_HFM_CRL1', 'CRL1', 'CRL2', 'CRL2_Before_SSA', 'Before_SSA', 'SSA', 'SSA_Before_FFO', 'Before_FFO', 'AFFO', 'FFO', 'FFO_At_Sample', 'At_Sample']
    for el_name in names:
        if el_name == 'S1':
            # S1: aperture 26.62m
            el.append(srwlib.SRWLOptA(
                _shape=v.op_S1_shape,
                _ap_or_ob='a',
                _Dx=v.op_S1_Dx,
                _Dy=v.op_S1_Dy,
                _x=v.op_S1_x,
                _y=v.op_S1_y,
            ))
            pp.append(v.op_S1_pp)
        elif el_name == 'S1_HCM':
            # S1_HCM: drift 26.62m
            el.append(srwlib.SRWLOptD(
                _L=v.op_S1_HCM_L,
            ))
            pp.append(v.op_S1_HCM_pp)
        elif el_name == 'HCM':
            # HCM: sphericalMirror 28.35m
            el.append(srwlib.SRWLOptMirSph(
                _r=v.op_HCM_r,
                _size_tang=v.op_HCM_size_tang,
                _size_sag=v.op_HCM_size_sag,
                _nvx=v.op_HCM_nvx,
                _nvy=v.op_HCM_nvy,
                _nvz=v.op_HCM_nvz,
                _tvx=v.op_HCM_tvx,
                _tvy=v.op_HCM_tvy,
                _x=v.op_HCM_x,
                _y=v.op_HCM_y,
            ))
            pp.append(v.op_HCM_pp)
        elif el_name == 'HCM_DCM_C1':
            # HCM_DCM_C1: drift 28.35m
            el.append(srwlib.SRWLOptD(
                _L=v.op_HCM_DCM_C1_L,
            ))
            pp.append(v.op_HCM_DCM_C1_pp)
        elif el_name == 'DCM_C1':
            # DCM_C1: crystal 30.42m (first crystal of the double-crystal mono)
            crystal = srwlib.SRWLOptCryst(
                _d_sp=v.op_DCM_C1_d_sp,
                _psi0r=v.op_DCM_C1_psi0r,
                _psi0i=v.op_DCM_C1_psi0i,
                _psi_hr=v.op_DCM_C1_psiHr,
                _psi_hi=v.op_DCM_C1_psiHi,
                _psi_hbr=v.op_DCM_C1_psiHBr,
                _psi_hbi=v.op_DCM_C1_psiHBi,
                _tc=v.op_DCM_C1_tc,
                _ang_as=v.op_DCM_C1_ang_as,
                _nvx=v.op_DCM_C1_nvx,
                _nvy=v.op_DCM_C1_nvy,
                _nvz=v.op_DCM_C1_nvz,
                _tvx=v.op_DCM_C1_tvx,
                _tvy=v.op_DCM_C1_tvy,
                _uc=v.op_DCM_C1_uc,
                _e_avg=v.op_DCM_C1_energy,
                _ang_roll=v.op_DCM_C1_diffractionAngle
            )
            el.append(crystal)
            pp.append(v.op_DCM_C1_pp)
        elif el_name == 'DCM_C2':
            # DCM_C2: crystal 30.42m (second crystal of the double-crystal mono)
            crystal = srwlib.SRWLOptCryst(
                _d_sp=v.op_DCM_C2_d_sp,
                _psi0r=v.op_DCM_C2_psi0r,
                _psi0i=v.op_DCM_C2_psi0i,
                _psi_hr=v.op_DCM_C2_psiHr,
                _psi_hi=v.op_DCM_C2_psiHi,
                _psi_hbr=v.op_DCM_C2_psiHBr,
                _psi_hbi=v.op_DCM_C2_psiHBi,
                _tc=v.op_DCM_C2_tc,
                _ang_as=v.op_DCM_C2_ang_as,
                _nvx=v.op_DCM_C2_nvx,
                _nvy=v.op_DCM_C2_nvy,
                _nvz=v.op_DCM_C2_nvz,
                _tvx=v.op_DCM_C2_tvx,
                _tvy=v.op_DCM_C2_tvy,
                _uc=v.op_DCM_C2_uc,
                _e_avg=v.op_DCM_C2_energy,
                _ang_roll=v.op_DCM_C2_diffractionAngle
            )
            el.append(crystal)
            pp.append(v.op_DCM_C2_pp)
        elif el_name == 'DCM_C2_HFM':
            # DCM_C2_HFM: drift 30.42m
            el.append(srwlib.SRWLOptD(
                _L=v.op_DCM_C2_HFM_L,
            ))
            pp.append(v.op_DCM_C2_HFM_pp)
        elif el_name == 'HFM':
            # HFM: sphericalMirror 32.64m
            el.append(srwlib.SRWLOptMirSph(
                _r=v.op_HFM_r,
                _size_tang=v.op_HFM_size_tang,
                _size_sag=v.op_HFM_size_sag,
                _nvx=v.op_HFM_nvx,
                _nvy=v.op_HFM_nvy,
                _nvz=v.op_HFM_nvz,
                _tvx=v.op_HFM_tvx,
                _tvy=v.op_HFM_tvy,
                _x=v.op_HFM_x,
                _y=v.op_HFM_y,
            ))
            pp.append(v.op_HFM_pp)
        elif el_name == 'After_HFM':
            # After_HFM: watch 32.64m (watchpoint only; adds no element)
            pass
        elif el_name == 'After_HFM_CRL1':
            # After_HFM_CRL1: drift 32.64m
            el.append(srwlib.SRWLOptD(
                _L=v.op_After_HFM_CRL1_L,
            ))
            pp.append(v.op_After_HFM_CRL1_pp)
        elif el_name == 'CRL1':
            # CRL1: crl 34.15m
            el.append(srwlib.srwl_opt_setup_CRL(
                _foc_plane=v.op_CRL1_foc_plane,
                _delta=v.op_CRL1_delta,
                _atten_len=v.op_CRL1_atten_len,
                _shape=v.op_CRL1_shape,
                _apert_h=v.op_CRL1_apert_h,
                _apert_v=v.op_CRL1_apert_v,
                _r_min=v.op_CRL1_r_min,
                _n=v.op_CRL1_n,
                _wall_thick=v.op_CRL1_wall_thick,
                _xc=v.op_CRL1_x,
                _yc=v.op_CRL1_y,
            ))
            pp.append(v.op_CRL1_pp)
        elif el_name == 'CRL2':
            # CRL2: crl 34.15m
            el.append(srwlib.srwl_opt_setup_CRL(
                _foc_plane=v.op_CRL2_foc_plane,
                _delta=v.op_CRL2_delta,
                _atten_len=v.op_CRL2_atten_len,
                _shape=v.op_CRL2_shape,
                _apert_h=v.op_CRL2_apert_h,
                _apert_v=v.op_CRL2_apert_v,
                _r_min=v.op_CRL2_r_min,
                _n=v.op_CRL2_n,
                _wall_thick=v.op_CRL2_wall_thick,
                _xc=v.op_CRL2_x,
                _yc=v.op_CRL2_y,
            ))
            pp.append(v.op_CRL2_pp)
        elif el_name == 'CRL2_Before_SSA':
            # CRL2_Before_SSA: drift 34.15m
            el.append(srwlib.SRWLOptD(
                _L=v.op_CRL2_Before_SSA_L,
            ))
            pp.append(v.op_CRL2_Before_SSA_pp)
        elif el_name == 'Before_SSA':
            # Before_SSA: watch 61.75m (watchpoint only; adds no element)
            pass
        elif el_name == 'SSA':
            # SSA: aperture 61.75m
            el.append(srwlib.SRWLOptA(
                _shape=v.op_SSA_shape,
                _ap_or_ob='a',
                _Dx=v.op_SSA_Dx,
                _Dy=v.op_SSA_Dy,
                _x=v.op_SSA_x,
                _y=v.op_SSA_y,
            ))
            pp.append(v.op_SSA_pp)
        elif el_name == 'SSA_Before_FFO':
            # SSA_Before_FFO: drift 61.75m
            el.append(srwlib.SRWLOptD(
                _L=v.op_SSA_Before_FFO_L,
            ))
            pp.append(v.op_SSA_Before_FFO_pp)
        elif el_name == 'Before_FFO':
            # Before_FFO: watch 109.0m (watchpoint only; adds no element)
            pass
        elif el_name == 'AFFO':
            # AFFO: aperture 109.0m
            el.append(srwlib.SRWLOptA(
                _shape=v.op_AFFO_shape,
                _ap_or_ob='a',
                _Dx=v.op_AFFO_Dx,
                _Dy=v.op_AFFO_Dy,
                _x=v.op_AFFO_x,
                _y=v.op_AFFO_y,
            ))
            pp.append(v.op_AFFO_pp)
        elif el_name == 'FFO':
            # FFO: lens 109.0m
            el.append(srwlib.SRWLOptL(
                _Fx=v.op_FFO_Fx,
                _Fy=v.op_FFO_Fy,
                _x=v.op_FFO_x,
                _y=v.op_FFO_y,
            ))
            pp.append(v.op_FFO_pp)
        elif el_name == 'FFO_At_Sample':
            # FFO_At_Sample: drift 109.0m
            el.append(srwlib.SRWLOptD(
                _L=v.op_FFO_At_Sample_L,
            ))
            pp.append(v.op_FFO_At_Sample_pp)
        elif el_name == 'At_Sample':
            # At_Sample: watch 109.018147m (watchpoint only; adds no element)
            pass
    if want_final_propagation:
        pp.append(v.op_fin_pp)
    return srwlib.SRWLOptC(el, pp)
varParam = [
['name', 's', 'NSLS-II HXN beamline: SSA closer', 'simulation name'],
#---Data Folder
['fdir', 's', '', 'folder (directory) name for reading-in input and saving output data files'],
#---Electron Beam
['ebm_nm', 's', '', 'standard electron beam name'],
['ebm_nms', 's', '', 'standard electron beam name suffix: e.g. can be Day1, Final'],
['ebm_i', 'f', 0.5, 'electron beam current [A]'],
['ebm_e', 'f', 3.0, 'electron beam avarage energy [GeV]'],
['ebm_de', 'f', 0.0, 'electron beam average energy deviation [GeV]'],
['ebm_x', 'f', 0.0, 'electron beam initial average horizontal position [m]'],
['ebm_y', 'f', 0.0, 'electron beam initial average vertical position [m]'],
['ebm_xp', 'f', 0.0, 'electron beam initial average horizontal angle [rad]'],
['ebm_yp', 'f', 0.0, 'electron beam initial average vertical angle [rad]'],
['ebm_z', 'f', 0., 'electron beam initial average longitudinal position [m]'],
['ebm_dr', 'f', -1.8, 'electron beam longitudinal drift [m] to be performed before a required calculation'],
['ebm_ens', 'f', 0.00089, 'electron beam relative energy spread'],
['ebm_emx', 'f', 9e-10, 'electron beam horizontal emittance [m]'],
['ebm_emy', 'f', 8e-12, 'electron beam vertical emittance [m]'],
# Definition of the beam through Twiss:
['ebm_betax', 'f', 1.84, 'horizontal beta-function [m]'],
['ebm_betay', 'f', 1.17, 'vertical beta-function [m]'],
['ebm_alphax', 'f', 0.0, 'horizontal alpha-function [rad]'],
['ebm_alphay', 'f', 0.0, 'vertical alpha-function [rad]'],
['ebm_etax', 'f', 0.0, 'horizontal dispersion function [m]'],
['ebm_etay', 'f', 0.0, 'vertical dispersion function [m]'],
['ebm_etaxp', 'f', 0.0, 'horizontal dispersion function derivative [rad]'],
['ebm_etayp', 'f', 0.0, 'vertical dispersion function derivative [rad]'],
#---Undulator
['und_bx', 'f', 0.0, 'undulator horizontal peak magnetic field [T]'],
['und_by', 'f', 0.88770981, 'undulator vertical peak magnetic field [T]'],
['und_phx', 'f', 0.0, 'initial phase of the horizontal magnetic field [rad]'],
['und_phy', 'f', 0.0, 'initial phase of the vertical magnetic field [rad]'],
['und_b2e', '', '', 'estimate undulator fundamental photon energy (in [eV]) for the amplitude of sinusoidal magnetic field defined by und_b or und_bx, und_by', 'store_true'],
['und_e2b', '', '', 'estimate undulator field amplitude (in [T]) for the photon energy defined by w_e', 'store_true'],
['und_per', 'f', 0.02, 'undulator period [m]'],
['und_len', 'f', 4.865095, 'undulator length [m]'],
['und_zc', 'f', 0.0, 'undulator center longitudinal position [m]'],
['und_sx', 'i', 1, 'undulator horizontal magnetic field symmetry vs longitudinal position'],
['und_sy', 'i', -1, 'undulator vertical magnetic field symmetry vs longitudinal position'],
['und_g', 'f', 5.622, 'undulator gap [mm] (assumes availability of magnetic measurement or simulation data)'],
['und_ph', 'f', 0.0, 'shift of magnet arrays [mm] for which the field should be set up'],
['und_mdir', 's', '', 'name of magnetic measurements sub-folder'],
['und_mfs', 's', '', 'name of magnetic measurements for different gaps summary file'],
#---Calculation Types
# Electron Trajectory
['tr', '', '', 'calculate electron trajectory', 'store_true'],
['tr_cti', 'f', 0.0, 'initial time moment (c*t) for electron trajectory calculation [m]'],
['tr_ctf', 'f', 0.0, 'final time moment (c*t) for electron trajectory calculation [m]'],
['tr_np', 'f', 10000, 'number of points for trajectory calculation'],
['tr_mag', 'i', 2, 'magnetic field to be used for trajectory calculation: 1- approximate, 2- accurate'],
['tr_fn', 's', 'res_trj.dat', 'file name for saving calculated trajectory data'],
['tr_pl', 's', '', 'plot the resulting trajectiry in graph(s): ""- dont plot, otherwise the string should list the trajectory components to plot'],
#Single-Electron Spectrum vs Photon Energy
['ss', '', '', 'calculate single-e spectrum vs photon energy', 'store_true'],
['ss_ei', 'f', 100.0, 'initial photon energy [eV] for single-e spectrum vs photon energy calculation'],
['ss_ef', 'f', 20000.0, 'final photon energy [eV] for single-e spectrum vs photon energy calculation'],
['ss_ne', 'i', 10000, 'number of points vs photon energy for single-e spectrum vs photon energy calculation'],
['ss_x', 'f', 0.0, 'horizontal position [m] for single-e spectrum vs photon energy calculation'],
['ss_y', 'f', 0.0, 'vertical position [m] for single-e spectrum vs photon energy calculation'],
['ss_meth', 'i', 1, 'method to use for single-e spectrum vs photon energy calculation: 0- "manual", 1- "auto-undulator", 2- "auto-wiggler"'],
['ss_prec', 'f', 0.01, 'relative precision for single-e spectrum vs photon energy calculation (nominal value is 0.01)'],
['ss_pol', 'i', 6, 'polarization component to extract after spectrum vs photon energy calculation: 0- Linear Horizontal, 1- Linear Vertical, 2- Linear 45 degrees, 3- Linear 135 degrees, 4- Circular Right, 5- Circular Left, 6- Total'],
['ss_mag', 'i', 2, 'magnetic field to be used for single-e spectrum vs photon energy calculation: 1- approximate, 2- accurate'],
['ss_ft', 's', 'f', 'presentation/domain: "f"- frequency (photon energy), "t"- time'],
['ss_u', 'i', 1, 'electric field units: 0- arbitrary, 1- sqrt(Phot/s/0.1%bw/mm^2), 2- sqrt(J/eV/mm^2) or sqrt(W/mm^2), depending on representation (freq. or time)'],
['ss_fn', 's', 'res_spec_se.dat', 'file name for saving calculated single-e spectrum vs photon energy'],
['ss_pl', 's', '', 'plot the resulting single-e spectrum in a graph: ""- dont plot, "e"- show plot vs photon energy'],
#Multi-Electron Spectrum vs Photon Energy (taking into account e-beam emittance, energy spread and collection aperture size)
['sm', '', '', 'calculate multi-e spectrum vs photon energy', 'store_true'],
['sm_ei', 'f', 100.0, 'initial photon energy [eV] for multi-e spectrum vs photon energy calculation'],
['sm_ef', 'f', 20000.0, 'final photon energy [eV] for multi-e spectrum vs photon energy calculation'],
['sm_ne', 'i', 10000, 'number of points vs photon energy for multi-e spectrum vs photon energy calculation'],
['sm_x', 'f', 0.0, 'horizontal center position [m] for multi-e spectrum vs photon energy calculation'],
['sm_rx', 'f', 0.001, 'range of horizontal position / horizontal aperture size [m] for multi-e spectrum vs photon energy calculation'],
['sm_nx', 'i', 1, 'number of points vs horizontal position for multi-e spectrum vs photon energy calculation'],
['sm_y', 'f', 0.0, 'vertical center position [m] for multi-e spectrum vs photon energy calculation'],
['sm_ry', 'f', 0.001, 'range of vertical position / vertical aperture size [m] for multi-e spectrum vs photon energy calculation'],
['sm_ny', 'i', 1, 'number of points vs vertical position for multi-e spectrum vs photon energy calculation'],
['sm_mag', 'i', 1, 'magnetic field to be used for calculation of multi-e spectrum spectrum or intensity distribution: 1- approximate, 2- accurate'],
['sm_hi', 'i', 1, 'initial UR spectral harmonic to be taken into account for multi-e spectrum vs photon energy calculation'],
['sm_hf', 'i', 15, 'final UR spectral harmonic to be taken into account for multi-e spectrum vs photon energy calculation'],
['sm_prl', 'f', 1.0, 'longitudinal integration precision parameter for multi-e spectrum vs photon energy calculation'],
['sm_pra', 'f', 1.0, 'azimuthal integration precision parameter for multi-e spectrum vs photon energy calculation'],
['sm_meth', 'i', -1, 'method to use for spectrum vs photon energy calculation in case of arbitrary input magnetic field: 0- "manual", 1- "auto-undulator", 2- "auto-wiggler", -1- dont use this accurate integration method (rather use approximate if possible)'],
['sm_prec', 'f', 0.01, 'relative precision for spectrum vs photon energy calculation in case of arbitrary input magnetic field (nominal value is 0.01)'],
['sm_nm', 'i', 1, 'number of macro-electrons for calculation of spectrum in case of arbitrary input magnetic field'],
['sm_na', 'i', 5, 'number of macro-electrons to average on each node at parallel (MPI-based) calculation of spectrum in case of arbitrary input magnetic field'],
['sm_ns', 'i', 5, 'saving periodicity (in terms of macro-electrons) for intermediate intensity at calculation of multi-electron spectrum in case of arbitrary input magnetic field'],
['sm_type', 'i', 1, 'calculate flux (=1) or flux per unit surface (=2)'],
['sm_pol', 'i', 6, 'polarization component to extract after calculation of multi-e flux or intensity: 0- Linear Horizontal, 1- Linear Vertical, 2- Linear 45 degrees, 3- Linear 135 degrees, 4- Circular Right, 5- Circular Left, 6- Total'],
['sm_rm', 'i', 1, 'method for generation of pseudo-random numbers for e-beam phase-space integration: 1- standard pseudo-random number generator, 2- Halton sequences, 3- LPtau sequences (to be implemented)'],
['sm_fn', 's', 'res_spec_me.dat', 'file name for saving calculated milti-e spectrum vs photon energy'],
['sm_pl', 's', '', 'plot the resulting spectrum-e spectrum in a graph: ""- dont plot, "e"- show plot vs photon energy'],
#to add options for the multi-e calculation from "accurate" magnetic field
#Power Density Distribution vs horizontal and vertical position
['pw', '', '', 'calculate SR power density distribution', 'store_true'],
['pw_x', 'f', 0.0, 'central horizontal position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_rx', 'f', 0.025, 'range of horizontal position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_nx', 'i', 100, 'number of points vs horizontal position for calculation of power density distribution'],
['pw_y', 'f', 0.0, 'central vertical position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_ry', 'f', 0.015, 'range of vertical position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_ny', 'i', 100, 'number of points vs vertical position for calculation of power density distribution'],
['pw_pr', 'f', 1.0, 'precision factor for calculation of power density distribution'],
['pw_meth', 'i', 1, 'power density computation method (1- "near field", 2- "far field")'],
['pw_zst', 'f', 0., 'initial longitudinal position along electron trajectory of power density distribution (effective if pow_sst < pow_sfi)'],
['pw_zfi', 'f', 0., 'final longitudinal position along electron trajectory of power density distribution (effective if pow_sst < pow_sfi)'],
['pw_mag', 'i', 2, 'magnetic field to be used for power density calculation: 1- approximate, 2- accurate'],
['pw_fn', 's', 'res_pow.dat', 'file name for saving calculated power density distribution'],
['pw_pl', 's', '', 'plot the resulting power density distribution in a graph: ""- dont plot, "x"- vs horizontal position, "y"- vs vertical position, "xy"- vs horizontal and vertical position'],
#Single-Electron Intensity distribution vs horizontal and vertical position
['si', '', '', 'calculate single-e intensity distribution (without wavefront propagation through a beamline) vs horizontal and vertical position', 'store_true'],
#Single-Electron Wavefront Propagation
['ws', '', '', 'calculate single-electron (/ fully coherent) wavefront propagation', 'store_true'],
#Multi-Electron (partially-coherent) Wavefront Propagation
['wm', '', '', 'calculate multi-electron (/ partially coherent) wavefront propagation', 'store_true'],
['w_e', 'f', 8000.0, 'photon energy [eV] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ef', 'f', -1.0, 'final photon energy [eV] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ne', 'i', 1, 'number of points vs photon energy for calculation of intensity distribution'],
['w_x', 'f', 0.0, 'central horizontal position [m] for calculation of intensity distribution'],
['w_rx', 'f', 0.003, 'range of horizontal position [m] for calculation of intensity distribution'],
['w_nx', 'i', 100, 'number of points vs horizontal position for calculation of intensity distribution'],
['w_y', 'f', 0.0, 'central vertical position [m] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ry', 'f', 0.0007, 'range of vertical position [m] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ny', 'i', 100, 'number of points vs vertical position for calculation of intensity distribution'],
['w_smpf', 'f', 0.1, 'sampling factor for calculation of intensity distribution vs horizontal and vertical position'],
['w_meth', 'i', 1, 'method to use for calculation of intensity distribution vs horizontal and vertical position: 0- "manual", 1- "auto-undulator", 2- "auto-wiggler"'],
['w_prec', 'f', 0.01, 'relative precision for calculation of intensity distribution vs horizontal and vertical position'],
['w_u', 'i', 1, 'electric field units: 0- arbitrary, 1- sqrt(Phot/s/0.1%bw/mm^2), 2- sqrt(J/eV/mm^2) or sqrt(W/mm^2), depending on representation (freq. or time)'],
['si_pol', 'i', 6, 'polarization component to extract after calculation of intensity distribution: 0- Linear Horizontal, 1- Linear Vertical, 2- Linear 45 degrees, 3- Linear 135 degrees, 4- Circular Right, 5- Circular Left, 6- Total'],
['si_type', 'i', 0, 'type of a characteristic to be extracted after calculation of intensity distribution: 0- Single-Electron Intensity, 1- Multi-Electron Intensity, 2- Single-Electron Flux, 3- Multi-Electron Flux, 4- Single-Electron Radiation Phase, 5- Re(E): Real part of Single-Electron Electric Field, 6- Im(E): Imaginary part of Single-Electron Electric Field, 7- Single-Electron Intensity, integrated over Time or Photon Energy'],
['w_mag', 'i', 2, 'magnetic field to be used for calculation of intensity distribution vs horizontal and vertical position: 1- approximate, 2- accurate'],
['si_fn', 's', 'res_int_se.dat', 'file name for saving calculated single-e intensity distribution (without wavefront propagation through a beamline) vs horizontal and vertical position'],
['si_pl', 's', '', 'plot the input intensity distributions in graph(s): ""- dont plot, "x"- vs horizontal position, "y"- vs vertical position, "xy"- vs horizontal and vertical position'],
['ws_fni', 's', 'res_int_pr_se.dat', 'file name for saving propagated single-e intensity distribution vs horizontal and vertical position'],
['ws_pl', 's', '', 'plot the resulting intensity distributions in graph(s): ""- dont plot, "x"- vs horizontal position, "y"- vs vertical position, "xy"- vs horizontal and vertical position'],
['wm_nm', 'i', 1000, 'number of macro-electrons (coherent wavefronts) for calculation of multi-electron wavefront propagation'],
['wm_na', 'i', 5, 'number of macro-electrons (coherent wavefronts) to average on each node for parallel (MPI-based) calculation of multi-electron wavefront propagation'],
['wm_ns', 'i', 5, 'saving periodicity (in terms of macro-electrons / coherent wavefronts) for intermediate intensity at multi-electron wavefront propagation calculation'],
['wm_ch', 'i', 0, 'type of a characteristic to be extracted after calculation of multi-electron wavefront propagation: #0- intensity (s0); 1- four Stokes components; 2- mutual intensity cut vs x; 3- mutual intensity cut vs y; 40- intensity(s0), mutual intensity cuts and degree of coherence vs X & Y'],
['wm_ap', 'i', 0, 'switch specifying representation of the resulting Stokes parameters: coordinate (0) or angular (1)'],
['wm_x0', 'f', 0.0, 'horizontal center position for mutual intensity cut calculation'],
['wm_y0', 'f', 0.0, 'vertical center position for mutual intensity cut calculation'],
['wm_ei', 'i', 0, 'integration over photon energy is required (1) or not (0); if the integration is required, the limits are taken from w_e, w_ef'],
['wm_rm', 'i', 1, 'method for generation of pseudo-random numbers for e-beam phase-space integration: 1- standard pseudo-random number generator, 2- Halton sequences, 3- LPtau sequences (to be implemented)'],
['wm_am', 'i', 0, 'multi-electron integration approximation method: 0- no approximation (use the standard 5D integration method), 1- integrate numerically only over e-beam energy spread and use convolution to treat transverse emittance'],
['wm_fni', 's', 'res_int_pr_me.dat', 'file name for saving propagated multi-e intensity distribution vs horizontal and vertical position'],
['wm_ff', 's', 'ascii', 'format of file name for saving propagated multi-e intensity distribution vs horizontal and vertical position (ascii and hdf5 supported)'],
['wm_nmm', 'i', 1, 'number of MPI masters to use'],
['wm_ncm', 'i', 100, 'number of Coherent Modes to calculate'],
['wm_acm', 's', 'SP', 'coherent mode decomposition algorithm to be used (supported algorithms are: "SP" for SciPy, "SPS" for SciPy Sparse, "PM" for Primme, based on names of software packages)'],
['wm_nop', '', '', 'switch forcing to do calculations ignoring any optics defined (by set_optics function)', 'store_true'],
['wm_fnmi', 's', '', 'file name of input cross-spectral density / mutual intensity; if this file name is supplied, the initial cross-spectral density (for such operations as coherent mode decomposition) will not be calculated, but rathre it will be taken from that file.'],
['wm_fncm', 's', '', 'file name of input coherent modes; if this file name is supplied, the eventual partially-coherent radiation propagation simulation will be done based on propagation of the coherent modes from that file.'],
['wm_fbk', '', '', 'create backup file(s) with propagated multi-e intensity distribution vs horizontal and vertical position and other radiation characteristics', 'store_true'],
# Optics parameters
['op_r', 'f', 20.0, 'longitudinal position of the first optical element [m]'],
# Former appParam:
['rs_type', 's', 't', 'source type, (u) idealized undulator, (t), tabulated undulator, (m) multipole, (g) gaussian beam'],
#---Beamline optics:
# S1: aperture
['op_S1_shape', 's', 'r', 'shape'],
['op_S1_Dx', 'f', 0.0025, 'horizontalSize'],
['op_S1_Dy', 'f', 0.0007, 'verticalSize'],
['op_S1_x', 'f', 0.0, 'horizontalOffset'],
['op_S1_y', 'f', 0.0, 'verticalOffset'],
# S1_HCM: drift
['op_S1_HCM_L', 'f', 1.7300000000000004, 'length'],
# HCM: sphericalMirror
['op_HCM_hfn', 's', 'None', 'heightProfileFile'],
['op_HCM_dim', 's', 'x', 'orientation'],
['op_HCM_r', 'f', 17718.8, 'radius'],
['op_HCM_size_tang', 'f', 1.0, 'tangentialSize'],
['op_HCM_size_sag', 'f', 0.006, 'sagittalSize'],
['op_HCM_ang', 'f', 0.0032, 'grazingAngle'],
['op_HCM_nvx', 'f', 0.9999948800043691, 'normalVectorX'],
['op_HCM_nvy', 'f', 0.0, 'normalVectorY'],
['op_HCM_nvz', 'f', -0.003199994538669463, 'normalVectorZ'],
['op_HCM_tvx', 'f', 0.003199994538669463, 'tangentialVectorX'],
['op_HCM_tvy', 'f', 0.0, 'tangentialVectorY'],
['op_HCM_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_HCM_x', 'f', 0.0, 'horizontalOffset'],
['op_HCM_y', 'f', 0.0, 'verticalOffset'],
# HCM_DCM_C1: drift
['op_HCM_DCM_C1_L', 'f', 2.0700000000000003, 'length'],
# DCM_C1: crystal
['op_DCM_C1_hfn', 's', '', 'heightProfileFile'],
['op_DCM_C1_dim', 's', 'x', 'orientation'],
['op_DCM_C1_d_sp', 'f', 3.1355713563754857, 'dSpacing'],
['op_DCM_C1_psi0r', 'f', -1.5322783990464697e-05, 'psi0r'],
['op_DCM_C1_psi0i', 'f', 3.594107754061173e-07, 'psi0i'],
['op_DCM_C1_psiHr', 'f', -8.107063544835198e-06, 'psiHr'],
['op_DCM_C1_psiHi', 'f', 2.509311323470587e-07, 'psiHi'],
['op_DCM_C1_psiHBr', 'f', -8.107063544835198e-06, 'psiHBr'],
['op_DCM_C1_psiHBi', 'f', 2.509311323470587e-07, 'psiHBi'],
['op_DCM_C1_tc', 'f', 0.01, 'crystalThickness'],
['op_DCM_C1_uc', 'f', 1, 'useCase'],
['op_DCM_C1_ang_as', 'f', 0.0, 'asymmetryAngle'],
['op_DCM_C1_nvx', 'f', -0.9689738178863608, 'nvx'],
['op_DCM_C1_nvy', 'f', 6.5840770039163984e-09, 'nvy'],
['op_DCM_C1_nvz', 'f', -0.24716338776349875, 'nvz'],
['op_DCM_C1_tvx', 'f', -0.24716338776349875, 'tvx'],
['op_DCM_C1_tvy', 'f', 1.6794496895008727e-09, 'tvy'],
['op_DCM_C1_ang', 'f', 0.2497517176345311, 'grazingAngle'],
['op_DCM_C1_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_DCM_C1_energy', 'f', 8000.0, 'energy'],
['op_DCM_C1_diffractionAngle', 'f', 1.57079632, 'diffractionAngle'],
# DCM_C2: crystal
['op_DCM_C2_hfn', 's', '', 'heightProfileFile'],
['op_DCM_C2_dim', 's', 'x', 'orientation'],
['op_DCM_C2_d_sp', 'f', 3.1355713563754857, 'dSpacing'],
['op_DCM_C2_psi0r', 'f', -1.5322783990464697e-05, 'psi0r'],
['op_DCM_C2_psi0i', 'f', 3.594107754061173e-07, 'psi0i'],
['op_DCM_C2_psiHr', 'f', -8.107063544835198e-06, 'psiHr'],
['op_DCM_C2_psiHi', 'f', 2.509311323470587e-07, 'psiHi'],
['op_DCM_C2_psiHBr', 'f', -8.107063544835198e-06, 'psiHBr'],
['op_DCM_C2_psiHBi', 'f', 2.509311323470587e-07, 'psiHBi'],
['op_DCM_C2_tc', 'f', 0.01, 'crystalThickness'],
['op_DCM_C2_uc', 'f', 1, 'useCase'],
['op_DCM_C2_ang_as', 'f', 0.0, 'asymmetryAngle'],
['op_DCM_C2_nvx', 'f', 0.9689738178863608, 'nvx'],
['op_DCM_C2_nvy', 'f', 6.5840770039163984e-09, 'nvy'],
['op_DCM_C2_nvz', 'f', -0.24716338776349875, 'nvz'],
['op_DCM_C2_tvx', 'f', 0.24716338776349875, 'tvx'],
['op_DCM_C2_tvy', 'f', 1.6794496895008727e-09, 'tvy'],
['op_DCM_C2_ang', 'f', 0.2497517176345311, 'grazingAngle'],
['op_DCM_C2_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_DCM_C2_energy', 'f', 8000.0, 'energy'],
['op_DCM_C2_diffractionAngle', 'f', -1.57079632, 'diffractionAngle'],
# DCM_C2_HFM: drift
['op_DCM_C2_HFM_L', 'f', 2.219999999999999, 'length'],
# HFM: sphericalMirror
['op_HFM_hfn', 's', 'None', 'heightProfileFile'],
['op_HFM_dim', 's', 'x', 'orientation'],
['op_HFM_r', 'f', 18193.8, 'radius'],
['op_HFM_size_tang', 'f', 1.0, 'tangentialSize'],
['op_HFM_size_sag', 'f', 0.06, 'sagittalSize'],
['op_HFM_ang', 'f', 0.0032, 'grazingAngle'],
['op_HFM_nvx', 'f', -0.9999948800043691, 'normalVectorX'],
['op_HFM_nvy', 'f', 0.0, 'normalVectorY'],
['op_HFM_nvz', 'f', -0.003199994538669463, 'normalVectorZ'],
['op_HFM_tvx', 'f', -0.003199994538669463, 'tangentialVectorX'],
['op_HFM_tvy', 'f', 0.0, 'tangentialVectorY'],
['op_HFM_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_HFM_x', 'f', 0.0, 'horizontalOffset'],
['op_HFM_y', 'f', 0.0, 'verticalOffset'],
# After_HFM_CRL1: drift
['op_After_HFM_CRL1_L', 'f', 1.509999999999998, 'length'],
# CRL1: crl
['op_CRL1_foc_plane', 'f', 2, 'focalPlane'],
['op_CRL1_delta', 'f', 5.326453e-06, 'refractiveIndex'],
['op_CRL1_atten_len', 'f', 0.005276, 'attenuationLength'],
['op_CRL1_shape', 'f', 1, 'shape'],
['op_CRL1_apert_h', 'f', 0.003, 'horizontalApertureSize'],
['op_CRL1_apert_v', 'f', 0.0015, 'verticalApertureSize'],
['op_CRL1_r_min', 'f', 0.001, 'tipRadius'],
['op_CRL1_wall_thick', 'f', 5e-05, 'tipWallThickness'],
['op_CRL1_x', 'f', 0.0, 'horizontalOffset'],
['op_CRL1_y', 'f', 0.0, 'verticalOffset'],
['op_CRL1_n', 'i', 4, 'numberOfLenses'],
# CRL2: crl
['op_CRL2_foc_plane', 'f', 2, 'focalPlane'],
['op_CRL2_delta', 'f', 5.326453e-06, 'refractiveIndex'],
['op_CRL2_atten_len', 'f', 0.005276, 'attenuationLength'],
['op_CRL2_shape', 'f', 1, 'shape'],
['op_CRL2_apert_h', 'f', 0.003, 'horizontalApertureSize'],
['op_CRL2_apert_v', 'f', 0.0015, 'verticalApertureSize'],
['op_CRL2_r_min', 'f', 0.0015, 'tipRadius'],
['op_CRL2_wall_thick', 'f', 5e-05, 'tipWallThickness'],
['op_CRL2_x', 'f', 0.0, 'horizontalOffset'],
['op_CRL2_y', 'f', 0.0, 'verticalOffset'],
['op_CRL2_n', 'i', 3, 'numberOfLenses'],
# CRL2_Before_SSA: drift
['op_CRL2_Before_SSA_L', 'f', 27.6, 'length'],
# SSA: aperture
['op_SSA_shape', 's', 'r', 'shape'],
['op_SSA_Dx', 'f', 5e-05, 'horizontalSize'],
['op_SSA_Dy', 'f', 0.0001, 'verticalSize'],
['op_SSA_x', 'f', 0.0, 'horizontalOffset'],
['op_SSA_y', 'f', 0.0, 'verticalOffset'],
# SSA_Before_FFO: drift
['op_SSA_Before_FFO_L', 'f', 47.25, 'length'],
# AFFO: aperture
['op_AFFO_shape', 's', 'r', 'shape'],
['op_AFFO_Dx', 'f', 0.00015, 'horizontalSize'],
['op_AFFO_Dy', 'f', 0.00015, 'verticalSize'],
['op_AFFO_x', 'f', 0.0, 'horizontalOffset'],
['op_AFFO_y', 'f', 0.0, 'verticalOffset'],
# FFO: lens
['op_FFO_Fx', 'f', 0.01814, 'horizontalFocalLength'],
['op_FFO_Fy', 'f', 0.01814, 'verticalFocalLength'],
['op_FFO_x', 'f', 0.0, 'horizontalOffset'],
['op_FFO_y', 'f', 0.0, 'verticalOffset'],
# FFO_At_Sample: drift
['op_FFO_At_Sample_L', 'f', 0.018146999999999025, 'length'],
#---Propagation parameters
['op_S1_pp', 'f', [0, 0, 1.0, 0, 0, 1.2, 2.5, 1.2, 10.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'S1'],
['op_S1_HCM_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'S1_HCM'],
['op_HCM_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'HCM'],
['op_HCM_DCM_C1_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'HCM_DCM_C1'],
['op_DCM_C1_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'DCM_C1'],
['op_DCM_C2_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'DCM_C2'],
['op_DCM_C2_HFM_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'DCM_C2_HFM'],
['op_HFM_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'HFM'],
['op_After_HFM_CRL1_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'After_HFM_CRL1'],
['op_CRL1_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'CRL1'],
['op_CRL2_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'CRL2'],
['op_CRL2_Before_SSA_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'CRL2_Before_SSA'],
['op_SSA_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'SSA'],
['op_SSA_Before_FFO_pp', 'f', [0, 0, 1.0, 3, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'SSA_Before_FFO'],
['op_AFFO_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'AFFO'],
['op_FFO_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'FFO'],
['op_FFO_At_Sample_pp', 'f', [0, 0, 1.0, 4, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'FFO_At_Sample'],
['op_fin_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'final post-propagation (resize) parameters'],
#[ 0]: Auto-Resize (1) or not (0) Before propagation
#[ 1]: Auto-Resize (1) or not (0) After propagation
#[ 2]: Relative Precision for propagation with Auto-Resizing (1. is nominal)
#[ 3]: Allow (1) or not (0) for semi-analytical treatment of the quadratic (leading) phase terms at the propagation
#[ 4]: Do any Resizing on Fourier side, using FFT, (1) or not (0)
#[ 5]: Horizontal Range modification factor at Resizing (1. means no modification)
#[ 6]: Horizontal Resolution modification factor at Resizing
#[ 7]: Vertical Range modification factor at Resizing
#[ 8]: Vertical Resolution modification factor at Resizing
#[ 9]: Type of wavefront Shift before Resizing (not yet implemented)
#[10]: New Horizontal wavefront Center position after Shift (not yet implemented)
#[11]: New Vertical wavefront Center position after Shift (not yet implemented)
#[12]: Optional: Orientation of the Output Optical Axis vector in the Incident Beam Frame: Horizontal Coordinate
#[13]: Optional: Orientation of the Output Optical Axis vector in the Incident Beam Frame: Vertical Coordinate
#[14]: Optional: Orientation of the Output Optical Axis vector in the Incident Beam Frame: Longitudinal Coordinate
#[15]: Optional: Orientation of the Horizontal Base vector of the Output Frame in the Incident Beam Frame: Horizontal Coordinate
#[16]: Optional: Orientation of the Horizontal Base vector of the Output Frame in the Incident Beam Frame: Vertical Coordinate
]
def setup_magnetic_measurement_files(filename, v):
    """Extract the magnetic-measurement zip *filename* and point *v* at it.

    Only MPI rank 0 (or the sole process of a serial run) unpacks the
    archive; the path of the *.txt index file found inside is then
    broadcast so every rank sets ``v.und_mfs`` / ``v.und_mdir``
    consistently.
    """
    import os
    c = None  # MPI communicator, only set when running under MPI with >1 ranks
    f = None  # path of the single *.txt measurement index inside the zip
    r = 0     # this process's rank; stays 0 in serial runs
    try:
        import mpi4py.MPI
        if mpi4py.MPI.COMM_WORLD.Get_size() > 1:
            c = mpi4py.MPI.COMM_WORLD
            r = c.Get_rank()
    except Exception:
        # mpi4py missing or unusable: fall through to the serial path.
        pass
    if r == 0:
        try:
            import zipfile
            z = zipfile.ZipFile(filename)
            # The archive must contain exactly one *.txt index file.
            f = [x for x in z.namelist() if x.endswith('.txt')]
            if len(f) != 1:
                raise RuntimeError(
                    '{} magnetic measurement index (*.txt) file={}'.format(
                        'too many' if len(f) > 0 else 'missing',
                        filename,
                    )
                )
            f = f[0]
            z.extractall()
        except Exception:
            # Abort the whole MPI job; otherwise the other ranks would
            # hang waiting in the bcast below.
            if c:
                c.Abort(1)
            raise
    if c:
        # Collective call, matched on all ranks: rank 0 sends, others receive.
        f = c.bcast(f, root=0)
    v.und_mfs = os.path.basename(f)
    v.und_mdir = os.path.dirname(f) or './'
def epilogue():
    """Post-run hook; intentionally does nothing."""
def main():
    """Parse CLI options, stage the magnetic-measurement data, build the
    beamline optics, and run all requested SRW calculations."""
    v = srwl_bl.srwl_uti_parse_options(srwl_bl.srwl_uti_ext_options(varParam), use_sys_argv=True)
    setup_magnetic_measurement_files("magn_meas_u20_hxn.zip", v)
    # Ordered beamline element names consumed by set_optics().
    names = ['S1','S1_HCM','HCM','HCM_DCM_C1','DCM_C1','DCM_C2','DCM_C2_HFM','HFM','After_HFM','After_HFM_CRL1','CRL1','CRL2','CRL2_Before_SSA','Before_SSA','SSA','SSA_Before_FFO','Before_FFO','AFFO','FFO','FFO_At_Sample','At_Sample']
    op = set_optics(v, names, True)
    # Enable every report type and its plot flavor before calc_all().
    v.ws = True
    v.ws_pl = 'xy'
    v.ss = True
    v.ss_pl = 'e'
    v.sm = True
    v.sm_pl = 'e'
    v.pw = True
    v.pw_pl = 'xy'
    v.si = True
    v.si_pl = 'xy'
    v.tr = True
    v.tr_pl = 'xz'
    srwl_bl.SRWLBeamline(_name=v.name).calc_all(v, op)
main()
epilogue()
|
radiasoft/sirepo
|
tests/template/srw_generate_data/nsls-ii-hxn-beamline-ssa-closer.py
|
Python
|
apache-2.0
| 38,894
|
[
"CRYSTAL",
"Gaussian"
] |
e99e873b7eb80131fd1fa93d49b3b122b502a311accafc938da1645893498f3b
|
#!/bin/py
#
# Combustion Theory Final Exam: Turbulent Diffusion Flames
#
import sys
import numpy as np
import pylab
import scipy.special as ss
def beta(a, b, mew):
    """Beta-distribution PDF with shape parameters (a, b), evaluated at mew.

    Computes Gamma(a+b)/(Gamma(a)*Gamma(b)) * mew**(a-1) * (1-mew)**(b-1).
    """
    norm = ss.gamma(a + b) / (ss.gamma(a) * ss.gamma(b))
    return norm * mew ** (a - 1) * (1 - mew) ** (b - 1)
def plot_beta(a, b):
    """Plot the beta PDF with shapes (a, b) over 1000 points on [0, 1]."""
    xs = np.mgrid[0:1:1000j]
    ys = [beta(a, b, x) for x in xs]
    pylab.plot(list(xs), ys, linewidth=3.0)
#
# main function
#
if __name__ == "__main__":
""" run a simulation, and plot everything up"""
#
# define field
#
nx = 50
ny = 91
# sanity check
if(ny%2 != 1):
print 'ny must be odd!'
print ny
print sys.exit(1)
# initialize
z = np.zeros(ny)
#
# enforce b.c. ( x < 0, y = 0 => z=1)
#
z[0:ny/2 +1] = 1.0
# spacing
dx = 1 # centimeters
dy = 0.2 # centimeters
# Diffusivities
DL = 0.176 # (cm^2/sec)
DT = 10*DL
u = 100 # 100 centimeters = 1 m/s
# --------------------------------------------------
#
# spatial iteration loop!
#
# --------------------------------------------------
zf = []
zf.append(z)
zlf = []
zlf.append(z)
zzf = []
zzf.append(np.zeros(ny))
zl = z
for i in xrange(nx):
zt = np.zeros(ny)
zt[0] = 1.0
zlt = np.zeros(ny)
zlt[0] = 1.0
for j in xrange(1,ny-1):
zt[j] = DT*dx*(z[j+1]-2*z[j]+z[j-1])/(u*dy*dy) + z[j]
zlt[j] = DL*dx*(zl[j+1]-2*zl[j]+zl[j-1])/(u*dy*dy) + zl[j]
#
# update mean field and save state
#
z = zt
zl = zlt
zlf.append(zl)
zf.append(z)
#
# calculate fluctuation \bar (z'z')
#
zzt = np.zeros(ny)
for j in xrange(1,ny-1):
zzt[j] = 0.25*((z[j+1]-z[j-1])/dy)**2 * DT * (i*dx)/u
zzf.append(zzt)
# --------------------------------------------------
#
# plot solution of mean field
#
# --------------------------------------------------
y = np.arange(-dy*ny/2., dy*ny/2., dy)
ind=0
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zf[ind], linewidth=2.0, label=t)
pylab.xlabel('y (cm)',size=22.0)
pylab.ylabel(r'$\bar z$', size=30.0)
pylab.title(r'Mean value of the mixture fraction, $\bar z$ at several x-locations. ')
pylab.grid(True)
#
# 5 cm, 30 cm and 50 cm
#
ind=5
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zf[ind], linewidth=2.0, label=t)
ind=30
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zf[ind], linewidth=2.0, label=t)
ind=50
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zf[ind], linewidth=2.0, label=t)
#
# plot mean field
#
pylab.xlim([-5,5])
pylab.ylim([-0.1,1.1])
pylab.legend()
pylab.savefig('mean.pdf')
pylab.close()
# --------------------------------------------------
#
# calculate and plot fluctuation!
#
# --------------------------------------------------
ind=0
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zzf[ind], linewidth=2.0, label=t)
pylab.xlabel('y (cm)',size=22.0)
pylab.ylabel(r'$\bar{z^{\prime}z^{\prime}}$', size=30.0)
pylab.title(r'Fluctuating value of the mixture fraction')
pylab.grid(True)
ind=4
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zzf[ind], linewidth=2.0, label=t)
ind=30
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zzf[ind], linewidth=2.0, label=t)
ind=50
t='x = '+str(ind*dx)+' cm'
pylab.plot(y,zzf[ind], linewidth=2.0, label=t)
#
# plot fluctuation
#
pylab.legend()
pylab.savefig('fluc.pdf')
pylab.close()
# --------------------------------------------------
#
# calculate and plot PDF of mixture fraction!
#
# --------------------------------------------------
#
# y = 0, x = 30 (should look gaussian)
#
ind = 30
yloc = ny/2.
zbar = zf[ind][yloc]
zzbar = zzf[ind][yloc]
gamm = (zbar * (1 - zbar ) / (zzbar*zzbar) ) - 1
if(gamm < 0):
gamm = 0
alph = zbar * gamm
bet = (1-zbar)*gamm
plot_beta(alph, bet)
#
# plot at y = 15, x = 30 (should look like a delta function)
#
ind = 30
yloc = ny-1
zbar = zf[ind][yloc]
print zbar
zzbar = zzf[ind][yloc]
gamm = (zbar * (1 - zbar ) / (zzbar*zzbar) ) - 1
alph = zbar * gamm
bet = (1-zbar)*gamm
plot_beta(alph, bet)
#
# generic plot options
#
pylab.xlabel(r'$\bar z$',size=22.0)
pylab.ylabel(r'$P(\bar z)$', size=30.0)
pylab.xlim(0.0, 1.0)
pylab.ylim(0.0, 6.0)
pylab.legend()
pylab.savefig('pdf.pdf')
pylab.close()
# --------------------------------------------------
#
# calculate and plot temperature profiles for laminar and turbulent profiles
#
# --------------------------------------------------
# laminar:
#
tl = zlf
tu = 293. # room temp
Q = 55
# CP = 1.00 kJ/kg.K
cp = 1.00
nuf = 1
wf = 1
#
# location = 30 cm
#
ind = 30
trl = tu + zlf[ind]*Q/(cp*nuf*wf)
tll = tu + (1-zlf[ind])*Q/(cp*nuf*wf)
tll[ny/2:]=trl[ny/2:]
pylab.plot(y,tll, linewidth=2.0, label='Laminar')
#
# turbulent
#
tr = tu + zf[ind]*Q/(cp*nuf*wf)
tl = tu + (1-zf[ind])*Q/(cp*nuf*wf)
tl[ny/2:]=tr[ny/2:]
pylab.plot(y,tl, linewidth=2.0, label='Turbulent')
#
# plot
#
pylab.xlim(-3.0, 3.0)
pylab.xlabel('y (cm)',size=22.0)
pylab.ylabel('Temperature (K)', size=30.0)
pylab.legend()
pylab.savefig('temperature.pdf')
#
# nick
# 5/9/14
#
|
nicholasmalaya/paleologos
|
combustion/final/flow.py
|
Python
|
mit
| 5,810
|
[
"Gaussian"
] |
5d2a599a3a5cf528e80a93233bdcda186261015be55e2168e6cde0051006731e
|
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
##############################################################################
# Data inspectors for VTK
from __future__ import division
from vistrails.core.modules.vistrails_module import ModuleError
from vistrails.core.modules.basic_modules import Module, Float, Integer
from vistrails.core.modules.config import ModuleSettings
import vtk
from .hasher import vtk_hasher
from .vtk_wrapper.wrapper import VTKInstanceWrapper
class vtkBaseInspector(Module):
    """Abstract base for the VTK data inspectors.

    Subclasses resolve a VTK object in ``compute`` and then call
    ``auto_set_results`` to populate their declared output ports.
    """
    _settings = ModuleSettings(abstract=True)
    def auto_set_results(self, vtk_object):
        # Set every output port whose name matches a method of vtk_object
        # (e.g. port 'GetBounds' -> vtk_object.GetBounds()).
        mid = self.moduleInfo['moduleId']
        for function in self.outputPorts.keys():
            if hasattr(vtk_object, function):
                retValues = getattr(vtk_object, function)()
                if issubclass(retValues.__class__, vtk.vtkObject):
                    # Wrap raw VTK objects so VisTrails can track them.
                    output = VTKInstanceWrapper(retValues, mid)
                    self.set_output(function, output)
                elif isinstance(retValues, (tuple, list)):
                    # Wrap any VTK objects nested in a sequence, then
                    # rebuild the sequence with its original type.
                    result = list(retValues)
                    for i in xrange(len(result)):
                        if issubclass(result[i].__class__, vtk.vtkObject):
                            result[i] = VTKInstanceWrapper(result[i], mid)
                    self.set_output(function, type(retValues)(result))
                else:
                    # Plain scalars/strings pass through unchanged.
                    self.set_output(function, retValues)
class vtkDataSetInspector(vtkBaseInspector):
    """Inspector exposing geometry/attribute summaries of a vtkDataSet."""
    _settings = ModuleSettings(abstract=False, signature=vtk_hasher)
    _input_ports = [('SetInputConnection0', 'vtkAlgorithmOutput'),
                    ('SetInput', 'vtkDataSet'),
                    ]
    _output_ports = [('GetBounds', [Float] * 6),
                     ('GetScalarRange', [Float] * 2),
                     ('GetLength', [Float]),
                     ('GetCenter', [Float] * 3),
                     ('GetNumberOfPoints', [Integer]),
                     ('GetNumberOfCells', [Integer]),
                     ('GetPointData', 'vtkPointData'),
                     ('GetCellData', 'vtkCellData'),
                     ]
    def compute(self):
        # Resolve the dataset either from an upstream algorithm
        # connection or from a directly supplied vtkDataSet.
        dataset = None
        if self.has_input("SetInputConnection0"):
            conn = self.get_input("SetInputConnection0")
            conn = getattr(conn, "vtkInstance", conn)
            producer = conn.GetProducer()
            try:
                dataset = producer.GetOutput()
            except AttributeError:
                raise ModuleError(self,
                                  "expected a module that supports GetOutput")
        elif self.has_input("SetInput"):
            dataset = self.get_input("SetInput")
            dataset = getattr(dataset, "vtkInstance", dataset)
        if dataset:
            self.auto_set_results(dataset)
class vtkDataSetAttributesInspector(vtkBaseInspector):
    """Inspector exposing the named data arrays of a vtkDataSetAttributes."""
    _settings = ModuleSettings(abstract=False, signature=vtk_hasher)
    _input_ports = [('SetInput', 'vtkDataSetAttributes')]
    _output_ports = [('GetScalars', 'vtkDataArray'),
                     ('GetVectors', 'vtkDataArray'),
                     ('GetNormals', 'vtkDataArray'),
                     ('GetTCoords', 'vtkDataArray'),
                     ('GetTensors', 'vtkDataArray'),
                     ('GetGlobalIds', 'vtkDataArray'),
                     ('GetPedigreeIds', 'vtkAbstractArray'),
                     ]
    def compute(self):
        # Unwrap the optional input down to the raw VTK object.
        attrs = None
        if self.has_input("SetInput"):
            attrs = self.get_input("SetInput")
            attrs = getattr(attrs, "vtkInstance", attrs)
        if attrs:
            self.auto_set_results(attrs)
class vtkDataArrayInspector(vtkBaseInspector):
    """Inspector exposing norm and range of a vtkDataArray."""
    _settings = ModuleSettings(abstract=False, signature=vtk_hasher)
    _input_ports = [('SetInput', 'vtkDataArray')]
    _output_ports = [('GetMaxNorm', [Float]),
                     ('GetRange', [Float] * 2)]
    def compute(self):
        # Unwrap the optional input down to the raw VTK object.
        array = None
        if self.has_input("SetInput"):
            array = self.get_input("SetInput")
            array = getattr(array, "vtkInstance", array)
        if array:
            self.auto_set_results(array)
class vtkPolyDataInspector(vtkDataSetInspector):
    """Inspector exposing the cell arrays and counts of a vtkPolyData."""
    _settings = ModuleSettings(abstract=False, signature=vtk_hasher)
    _input_ports = [('SetInputConnection0', 'vtkAlgorithmOutput'),
                    ('SetInput', 'vtkDataSet'),
                    ]
    _output_ports = [('GetVerts', 'vtkCellArray'),
                     ('GetLines', 'vtkCellArray'),
                     ('GetPolys', 'vtkCellArray'),
                     ('GetStrips', 'vtkCellArray'),
                     ('GetPoints', 'vtkPoints'),
                     ('GetNumberOfVerts', [Integer]),
                     ('GetNumberOfLines', [Integer]),
                     ('GetNumberOfPolys', [Integer]),
                     ('GetNumberOfStrips', [Integer]),
                     ]
    def compute(self):
        # Resolve the poly data either from an upstream algorithm
        # connection or from a directly supplied dataset.
        polydata = None
        if self.has_input("SetInputConnection0"):
            conn = self.get_input("SetInputConnection0")
            conn = getattr(conn, "vtkInstance", conn)
            producer = conn.GetProducer()
            try:
                polydata = producer.GetOutput()
            except AttributeError:
                raise ModuleError(self,
                                  "expected a module that supports GetOutput")
        elif self.has_input("SetInput"):
            polydata = self.get_input("SetInput")
            polydata = getattr(polydata, "vtkInstance", polydata)
        if polydata:
            self.auto_set_results(polydata)
# Modules this package registers with VisTrails.
_modules = [vtkBaseInspector,
            vtkDataSetInspector,
            vtkDataSetAttributesInspector,
            vtkDataArrayInspector,
            vtkPolyDataInspector]
|
hjanime/VisTrails
|
vistrails/packages/vtk/inspectors.py
|
Python
|
bsd-3-clause
| 7,975
|
[
"VTK"
] |
d86c6413e3f8659667cfc93ce6966c4a35af286efe5fca7394a174e4a3bcf8be
|
#!/usr/bin/python2
'''
Makes list of all Drosophila species out of list of files, submits makeblastdb
for the .fasta files
'''
import subprocess
import os
def get_list_of_all_files(path='../02_raw_input'):
    '''Return a sorted list of the filenames found in *path*.

    Replaces the previous ``ls`` subprocess call: ``os.listdir`` is the
    portable stdlib idiom and avoids parsing process output.  Sorting
    matches the alphabetical order ``ls`` produced.  (The trailing empty
    string the old split('\\n') appended is dropped; callers already
    filtered it out.)
    '''
    return sorted(os.listdir(path))
def submit_makeblastdbs(list_):
    '''Build BLAST databases from the D. mel "all_gene"/"all_prot" fastas.

    For each *.fasta file in *list_* whose name contains "mel", runs
    makeblastdb with -dbtype nucl (all_gene) or prot (all_prot).
    The two near-identical os.system branches were collapsed into one
    command template; the old string also carried stray line-continuation
    whitespace inside the shell command, which is gone now.
    '''
    for file_ in list_:
        if not (file_.endswith('.fasta') and 'mel' in file_):
            continue
        if 'all_gene' in file_:
            dbtype = 'nucl'
        elif 'all_prot' in file_:
            dbtype = 'prot'
        else:
            continue
        os.system('makeblastdb -in ../02_raw_input/{} '
                  '-dbtype {} -parse_seqids'.format(file_, dbtype))
def make_species_list(list_):
'''makes list of all species whose files are in 02_raw_input & puts in list'''
with open('../02_raw_input/species_list.txt', 'w') as f:
species_list = list(set([file_.split('_')[0]
for file_ in list_ if file_]))
species_list = [spec for spec in species_list
if spec != 'mel' and spec != 'fbgn' and spec != 'species']
f.write('\n'.join(species_list))
print
print
print 'Species in species_list.txt:'
print '\n'.join(species_list)
# Script entry: enumerate the input files, build the BLAST databases,
# and write species_list.txt.
list_ = get_list_of_all_files()
submit_makeblastdbs(list_)
make_species_list(list_)
|
CoderMatthias/ortholog-assignment
|
05_scripts/00_mk_db_N_sp_list.py
|
Python
|
gpl-2.0
| 1,598
|
[
"BLAST"
] |
b5e064efb5d3b1e0a7e82f7ef2f205f126266fa3d4d980830c7004ae7de0f62c
|
# Copyright 2010-2017, The University of Melbourne
# Copyright 2010-2017, Brian May
#
# This file is part of Karaage.
#
# Karaage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Karaage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Karaage If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
class Karaage(AppConfig):
    """Django application configuration for the core ``karaage`` app."""
    name = 'karaage'
|
brianmay/karaage
|
karaage/apps.py
|
Python
|
gpl-3.0
| 821
|
[
"Brian"
] |
3489994e9b32a02c80dd6f0194d6b5e0772c41578c5efe504337fc443614312b
|
# Custom GPAW setup for Puhti (Bull cluster)
import os
# Compilers and linker wrappers used for the (MPI) build.
compiler = './gcc.py'
mpicompiler = './gcc.py'
mpilinker = 'mpicc'
extra_compile_args = ['-std=c99', '-O3', '-fopenmp-simd']
extra_link_args = ['-fno-lto']
# libz
libraries = ['z']
# libxc (library_dirs/include_dirs are predefined by GPAW's customize machinery)
library_dirs += [os.environ['LIBXCDIR'] + '/lib']
include_dirs += [os.environ['LIBXCDIR'] + '/include']
libraries += ['xc']
# MKL for BLAS/LAPACK; ScaLAPACK/BLACS are linked only into the MPI build
libraries += ['mkl_intel_lp64' ,'mkl_sequential' ,'mkl_core']
mpi_libraries += ['mkl_scalapack_lp64', 'mkl_blacs_intelmpi_lp64']
# Feature switches: ScaLAPACK disabled, HDF5 enabled.
# NOTE(review): ScaLAPACK libraries are still added to mpi_libraries above
# even though scalapack is False -- confirm this is intentional.
scalapack = False
hdf5 = True
# GPAW preprocessor defines
define_macros += [('GPAW_NO_UNDERSCORE_CBLACS', '1')]
define_macros += [('GPAW_NO_UNDERSCORE_CSCALAPACK', '1')]
define_macros += [("GPAW_ASYNC",1)]
define_macros += [("GPAW_MPI2",1)]
|
mlouhivu/build-recipes
|
gpaw-conda/setup/customize-puhti.py
|
Python
|
mit
| 774
|
[
"GPAW"
] |
d64052a0f1f0a78da1ca9da0d145f7e9975bf0467b282166d0fdd5a9e692e3a8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.